Sep 29 12:40:11 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 29 12:40:11 crc restorecon[4574]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:11 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc 
restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 12:40:12 crc 
restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 12:40:12 crc 
restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 12:40:12 crc restorecon[4574]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 12:40:13 crc kubenswrapper[4611]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 12:40:13 crc kubenswrapper[4611]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 12:40:13 crc kubenswrapper[4611]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 12:40:13 crc kubenswrapper[4611]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 12:40:13 crc kubenswrapper[4611]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Sep 29 12:40:13 crc kubenswrapper[4611]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.523564 4611 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531368 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531405 4611 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531409 4611 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531414 4611 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531420 4611 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531424 4611 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531427 4611 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531431 4611 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531438 4611 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531444 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531451 4611 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531458 4611 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531464 4611 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531470 4611 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531474 4611 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531478 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531482 4611 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531487 4611 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531491 4611 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531496 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531499 4611 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531503 4611 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531507 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531511 4611 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531515 4611 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531519 4611 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531522 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531526 4611 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531533 4611 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531546 4611 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531551 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531555 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531559 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531563 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531568 4611 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531573 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531577 4611 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531582 4611 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531587 4611 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531592 4611 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531596 4611 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531600 4611 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531605 4611 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531612 4611 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531617 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531637 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531642 4611 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531646 4611 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531652 4611 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531656 4611 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531661 4611 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531665 4611 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531670 4611 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531674 4611 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531678 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531683 4611 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531687 4611 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531691 4611 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531695 4611 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531699 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531703 4611 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531708 4611 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531712 4611 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531716 4611 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531720 4611 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531725 4611 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531732 4611 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531737 4611 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531741 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531745 4611 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.531749 4611 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532413 4611 flags.go:64] FLAG: --address="0.0.0.0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532430 4611 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532439 4611 flags.go:64] FLAG: --anonymous-auth="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532445 4611 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532451 4611 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532456 4611 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532463 4611 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532468 4611 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532472 4611 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532476 4611 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532481 4611 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532494 4611 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532499 4611 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532503 4611 flags.go:64] FLAG: --cgroup-root=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532508 4611 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532512 4611 flags.go:64] FLAG: --client-ca-file=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532516 4611 flags.go:64] FLAG: --cloud-config=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532520 4611 flags.go:64] FLAG: --cloud-provider=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532524 4611 flags.go:64] FLAG: --cluster-dns="[]"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532532 4611 flags.go:64] FLAG: --cluster-domain=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532536 4611 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532540 4611 flags.go:64] FLAG: --config-dir=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532544 4611 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532549 4611 flags.go:64] FLAG: --container-log-max-files="5"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532555 4611 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532559 4611 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532563 4611 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532569 4611 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532574 4611 flags.go:64] FLAG: --contention-profiling="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532583 4611 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532591 4611 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532598 4611 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532603 4611 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532610 4611 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532616 4611 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532620 4611 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532641 4611 flags.go:64] FLAG: --enable-load-reader="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532646 4611 flags.go:64] FLAG: --enable-server="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532650 4611 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532661 4611 flags.go:64] FLAG: --event-burst="100"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532666 4611 flags.go:64] FLAG: --event-qps="50"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532673 4611 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532678 4611 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532684 4611 flags.go:64] FLAG: --eviction-hard=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532696 4611 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532701 4611 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532707 4611 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532719 4611 flags.go:64] FLAG: --eviction-soft=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532724 4611 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532729 4611 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532734 4611 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532739 4611 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532744 4611 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532749 4611 flags.go:64] FLAG: --fail-swap-on="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532754 4611 flags.go:64] FLAG: --feature-gates=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532761 4611 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532767 4611 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532773 4611 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532778 4611 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532783 4611 flags.go:64] FLAG: --healthz-port="10248"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532788 4611 flags.go:64] FLAG: --help="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532792 4611 flags.go:64] FLAG: --hostname-override=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532797 4611 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532801 4611 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532806 4611 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532810 4611 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532814 4611 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532818 4611 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532823 4611 flags.go:64] FLAG: --image-service-endpoint=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532827 4611 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532832 4611 flags.go:64] FLAG: --kube-api-burst="100"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532837 4611 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532842 4611 flags.go:64] FLAG: --kube-api-qps="50"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532846 4611 flags.go:64] FLAG: --kube-reserved=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532851 4611 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532858 4611 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532863 4611 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532868 4611 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532874 4611 flags.go:64] FLAG: --lock-file=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532878 4611 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532882 4611 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532887 4611 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532895 4611 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532903 4611 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532907 4611 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532912 4611 flags.go:64] FLAG: --logging-format="text"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532917 4611 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532922 4611 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532927 4611 flags.go:64] FLAG: --manifest-url=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532931 4611 flags.go:64] FLAG: --manifest-url-header=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532943 4611 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532948 4611 flags.go:64] FLAG: --max-open-files="1000000"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532954 4611 flags.go:64] FLAG: --max-pods="110"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532958 4611 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532964 4611 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532969 4611 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532975 4611 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532980 4611 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532985 4611 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.532991 4611 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533003 4611 flags.go:64] FLAG: --node-status-max-images="50"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533008 4611 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533012 4611 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533022 4611 flags.go:64] FLAG: --pod-cidr=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533026 4611 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533033 4611 flags.go:64] FLAG: --pod-manifest-path=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533038 4611 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533042 4611 flags.go:64] FLAG: --pods-per-core="0"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533047 4611 flags.go:64] FLAG: --port="10250"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533052 4611 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533057 4611 flags.go:64] FLAG: --provider-id=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533061 4611 flags.go:64] FLAG: --qos-reserved=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533065 4611 flags.go:64] FLAG: --read-only-port="10255"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533071 4611 flags.go:64] FLAG: --register-node="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533075 4611 flags.go:64] FLAG: --register-schedulable="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533083 4611 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533091 4611 flags.go:64] FLAG: --registry-burst="10"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533096 4611 flags.go:64] FLAG: --registry-qps="5"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533100 4611 flags.go:64] FLAG: --reserved-cpus=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533106 4611 flags.go:64] FLAG: --reserved-memory=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533113 4611 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533119 4611 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533127 4611 flags.go:64] FLAG: --rotate-certificates="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533132 4611 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533137 4611 flags.go:64] FLAG: --runonce="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533142 4611 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533151 4611 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533156 4611 flags.go:64] FLAG: --seccomp-default="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533161 4611 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533165 4611 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533170 4611 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533174 4611 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533179 4611 flags.go:64] FLAG: --storage-driver-password="root"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533183 4611 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533188 4611 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533193 4611 flags.go:64] FLAG: --storage-driver-user="root"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533198 4611 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533203 4611 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533208 4611 flags.go:64] FLAG: --system-cgroups=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533213 4611 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533224 4611 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533229 4611 flags.go:64] FLAG: --tls-cert-file=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533234 4611 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533242 4611 flags.go:64] FLAG: --tls-min-version=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533247 4611 flags.go:64] FLAG: --tls-private-key-file=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533253 4611 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533258 4611 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533263 4611 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533268 4611 flags.go:64] FLAG: --v="2"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533277 4611 flags.go:64] FLAG: --version="false"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533284 4611 flags.go:64] FLAG: --vmodule=""
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533291 4611 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533296 4611 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533435 4611 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533444 4611 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533449 4611 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533453 4611 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533456 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533460 4611 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533463 4611 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533467 4611 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533471 4611 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533474 4611 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533478 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533482 4611 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533486 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533490 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533493 4611 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533497 4611 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533500 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533504 4611 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533508 4611 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533512 4611 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533515 4611 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533519 4611 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533522 4611 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533527 4611 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533532 4611 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533536 4611 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533539 4611 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533544 4611 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533548 4611 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533552 4611 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533556 4611 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533560 4611 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533564 4611 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533571 4611 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533575 4611 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533580 4611 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533584 4611 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533588 4611 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533593 4611 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533598 4611 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533602 4611 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533606 4611 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533612 4611 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533616 4611 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533620 4611 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533641 4611 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533646 4611 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533650 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533654 4611 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533658 4611 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533662 4611 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533666 4611 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533670 4611 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533674 4611 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533677 4611 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533681 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533685 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533689 4611 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533692 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533696 4611 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533700 4611 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533704 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533708 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533712 4611 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533716 4611 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533723 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533728 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533732 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533736 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533740 4611 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.533744 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.533752 4611 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.541227 4611 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.541273 4611 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541342 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541351 4611 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541356 4611 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541361 4611 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541364 4611 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541369 4611 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541375 4611 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541379 4611 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541383 4611 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541387 4611 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541391 4611 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541394 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541398 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541402 4611 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541405 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541409 4611 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541413 4611 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541417 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541421 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541426 4611 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541430 4611 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541434 4611 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541438 4611 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541441 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541445 4611 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541449 4611 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541452 4611 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541456 4611 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541459 4611 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541463 4611 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541467 4611 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541470 4611 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541474 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541477 4611 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541482 4611 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541486 4611 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541490 4611 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541495 4611 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541499 4611 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541503 4611 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541507 4611 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541510 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541514 4611 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541517 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541521 4611 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541524 4611 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541528 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541532 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541535 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541539 4611 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541542 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541546 4611 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541550 4611 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541554 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541559 4611 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541564 4611 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541567 4611 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541571 4611 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541575 4611 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541579 4611 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541584 4611 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541588 4611 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541592 4611 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541596 4611 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541599 4611 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541603 4611 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541607 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541611 4611 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541614 4611 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541634 4611 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541640 4611 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.541648 4611 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541765 4611 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541771 4611 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541776 4611 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541781 4611 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541786 4611 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541790 4611 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541794 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541798 4611 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541802 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541806 4611 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541810 4611 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541814 4611 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541818 4611 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541821 4611 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541827 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541831 4611 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541835 4611 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541839 4611 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541843 4611 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541848 4611 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541851 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541855 4611 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541859 4611 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541863 4611 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541867 4611 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541871 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541874 4611 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541878 4611 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541882 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541886 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541889 4611 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541893 4611 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541897 4611 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541900 4611 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541905 4611 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541910 4611 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541914 4611 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541918 4611 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541922 4611 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541926 4611 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541930 4611 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541933 4611 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541938 4611 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541942 4611 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541946 4611 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541950 4611 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541954 4611 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541957 4611 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541961 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541965 4611 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541968 4611 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541972 4611 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541976 4611 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541982 4611 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541986 4611 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.541992 4611 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542003 4611 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542009 4611 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542015 4611 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542020 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542024 4611 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542029 4611 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542034 4611 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542038 4611 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542042 4611 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542047 4611 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542052 4611 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542056 4611 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542061 4611 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542065 4611 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.542071 4611 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.542079 4611 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.542284 4611 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.548660 4611 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.548759 4611 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.550126 4611 server.go:997] "Starting client certificate rotation"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.550163 4611 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.550335 4611 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-24 16:03:08.795373364 +0000 UTC
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.550414 4611 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1347h22m55.244961687s for next certificate rotation
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.586869 4611 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.589334 4611 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.600080 4611 log.go:25] "Validated CRI v1 runtime API"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.630119 4611 log.go:25] "Validated CRI v1 image API"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.631599 4611 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.637284 4611 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-12-34-22-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.637327 4611 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.648676 4611 manager.go:217] Machine: {Timestamp:2025-09-29 12:40:13.647203175 +0000 UTC m=+0.538722811 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:b577e751-f004-45f9-a489-c870d2ba486c BootID:e5548895-0aa9-44f2-872c-0702b2be968c Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:99:da:cb Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:99:da:cb Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:53:af:bd Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:52:96:33 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:80:71:6e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:b0:ff:ad Speed:-1 Mtu:1496} {Name:eth10 MacAddress:56:c3:57:a3:37:74 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8a:77:67:46:27:df Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.648895 4611 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.649046 4611 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.649321 4611 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.649483 4611 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.649515 4611 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.650280 4611 topology_manager.go:138] "Creating topology manager with none policy"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.650295 4611 container_manager_linux.go:303] "Creating device plugin manager"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.650765 4611 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.650792 4611 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.651403 4611 state_mem.go:36] "Initialized new in-memory state store"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.651487 4611 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.657407 4611 kubelet.go:418] "Attempting to sync node with API server"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.657430 4611 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.657458 4611 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.657474 4611 kubelet.go:324] "Adding apiserver pod source" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.657488 4611 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.664763 4611 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.665978 4611 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.667761 4611 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.668292 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.668329 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.668409 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.668419 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669450 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669479 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669488 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669498 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669512 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669520 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669528 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669542 4611 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669552 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669561 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669575 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.669583 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.671379 4611 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.671868 4611 server.go:1280] "Started kubelet" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.672043 4611 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.672456 4611 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.672815 4611 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Sep 29 12:40:13 crc systemd[1]: Started Kubernetes Kubelet. Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.673369 4611 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.674010 4611 server.go:460] "Adding debug handlers to kubelet server" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.674981 4611 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.675013 4611 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.675050 4611 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 14:14:20.861874008 +0000 UTC Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.675095 4611 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2041h34m7.186781608s for next certificate rotation Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.675269 4611 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.675750 4611 volume_manager.go:287] "The desired_state_of_world populator starts" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.675769 4611 volume_manager.go:289] "Starting Kubelet Volume Manager" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.675810 4611 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.676917 4611 factory.go:55] Registering systemd factory Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.676942 4611 factory.go:221] Registration of the systemd container factory successfully Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.685363 4611 factory.go:153] Registering CRI-O factory Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.686552 4611 
factory.go:221] Registration of the crio container factory successfully Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.686673 4611 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.686717 4611 factory.go:103] Registering Raw factory Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.686734 4611 manager.go:1196] Started watching for new ooms in manager Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.688100 4611 manager.go:319] Starting recovery of all containers Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.688321 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.688397 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.689807 4611 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.74:6443: connect: connection refused" interval="200ms" Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.688430 4611 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.74:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869c140d5ace002 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 12:40:13.671833602 +0000 UTC m=+0.563353218,LastTimestamp:2025-09-29 12:40:13.671833602 +0000 UTC m=+0.563353218,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695647 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695699 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695727 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695738 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695747 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695759 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695768 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695778 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695808 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695819 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695828 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695838 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695879 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695894 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695903 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695914 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695925 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695937 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695970 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695981 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.695993 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696001 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696011 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696058 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696088 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696116 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696222 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696235 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696260 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696286 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696298 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696308 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696318 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696329 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696339 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696365 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696376 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696386 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696396 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696406 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696416 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696443 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696454 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696464 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696475 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696485 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696495 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696521 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696532 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696541 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696550 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696560 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696610 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696641 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696653 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696664 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696676 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696687 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696695 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696724 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696736 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696745 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696753 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696763 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696773 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.696799 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700213 4611 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700238 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700250 4611 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700260 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700271 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700280 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700290 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700302 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700311 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700320 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700329 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700339 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700355 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700364 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700374 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700383 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700394 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700404 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700414 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700423 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700432 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700441 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700451 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700464 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700473 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700483 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700491 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700501 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700511 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700520 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700532 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700540 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700550 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700559 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700569 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700578 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700587 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700596 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700606 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700635 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700646 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700657 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700668 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700681 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700693 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700704 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700714 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700725 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700736 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700745 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700754 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700765 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700775 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700784 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700802 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700814 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700823 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700832 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700841 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700850 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700859 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700867 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700876 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700885 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700894 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700902 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700911 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700920 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700929 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700938 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700947 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700956 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700965 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700973 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700982 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700990 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.700998 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701006 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701014 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701023 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701032 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701040 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701050 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701059 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701073 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701082 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701092 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701102 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701111 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701121 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701130 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701139 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701148 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701156 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701166 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701175 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701184 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701192 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701201 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701210 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701219 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701229 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701239 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701248 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701259 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701267 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701276 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701285 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701294 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701303 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701312 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701321 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701330 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701339 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701349 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701359 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701369 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701378 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701388 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701397 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701408 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701417 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701425 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701434 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701444 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701453 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701462 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701472 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701481 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701490 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701498 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701507 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701517 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701526 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701534 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701546 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701558 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701570 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701580 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701592 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701602 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701613 4611 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701650 4611 reconstruct.go:97] "Volume reconstruction finished" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.701660 4611 reconciler.go:26] "Reconciler: start to sync state" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.709987 4611 manager.go:324] Recovery completed Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.718691 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.719994 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.720136 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.720258 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.721367 4611 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 12:40:13 crc 
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.721564 4611 state_mem.go:36] "Initialized new in-memory state store"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.732424 4611 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.734942 4611 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.734995 4611 status_manager.go:217] "Starting to sync pod status with apiserver"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.735033 4611 kubelet.go:2335] "Starting kubelet main sync loop"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.735039 4611 policy_none.go:49] "None policy: Start"
Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.735082 4611 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]"
Sep 29 12:40:13 crc kubenswrapper[4611]: W0929 12:40:13.735691 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused
Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.735758 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.736118 4611 memory_manager.go:170] "Starting memorymanager" policy="None"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.736227 4611 state_mem.go:35] "Initializing new in-memory state store"
Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.775480 4611 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.806802 4611 manager.go:334] "Starting Device Plugin manager"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.806847 4611 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.806859 4611 server.go:79] "Starting device plugin registration server"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.807658 4611 eviction_manager.go:189] "Eviction manager: starting control loop"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.807674 4611 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.808634 4611 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.808972 4611 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.808986 4611 plugin_manager.go:118] "Starting Kubelet Plugin Manager"
Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.813734 4611 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.835215 4611 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"]
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.835306 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.836677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.836707 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.836734 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.836849 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.837453 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.837502 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.837516 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.838001 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.838052 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.838094 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.838169 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.838195 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.839459 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.839507 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.839520 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.839888 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.839911 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.839924 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.841602 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.841659 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.841672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.841888 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.842567 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.842656 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843643 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843649 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843699 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843702 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843719 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.843788 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844391 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844435 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844483 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844498 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844507 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844694 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.844727 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.845175 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.845202 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.845213 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.845288 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.845305 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.845316 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.890745 4611 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.74:6443: connect: connection refused" interval="400ms" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903494 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903525 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903544 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903559 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903573 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903590 4611 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903605 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903646 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903677 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903736 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903775 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903807 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903827 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903865 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.903897 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.908384 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.909318 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.909353 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.909365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:13 crc kubenswrapper[4611]: I0929 12:40:13.909387 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 12:40:13 crc kubenswrapper[4611]: E0929 12:40:13.909783 4611 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.74:6443: connect: connection refused" node="crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005123 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005175 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005197 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005214 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005236 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005255 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" 
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005269 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005283 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005299 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005318 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005331 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005345 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005359 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005358 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005386 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005415 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
\"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005376 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005440 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005476 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005501 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005458 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005352 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005406 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005503 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005508 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005541 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" 
(UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005517 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005556 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005554 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.005451 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.110007 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.110977 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.111021 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.111032 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.111057 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 12:40:14 crc kubenswrapper[4611]: E0929 12:40:14.111328 4611 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.74:6443: connect: connection refused" node="crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.163900 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.171408 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.189040 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.206382 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.210682 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.212750 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-04dd2f43d9c263a13ef7f42f035687ccbaf39117dd2c058209039780d91468c2 WatchSource:0}: Error finding container 04dd2f43d9c263a13ef7f42f035687ccbaf39117dd2c058209039780d91468c2: Status 404 returned error can't find the container with id 04dd2f43d9c263a13ef7f42f035687ccbaf39117dd2c058209039780d91468c2 Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.223530 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-21040a5aec026916ba1bb3e43f1e8e519b5a6cd6984b77dbe308312798fd0673 WatchSource:0}: Error finding container 21040a5aec026916ba1bb3e43f1e8e519b5a6cd6984b77dbe308312798fd0673: Status 404 returned error can't find the container with id 21040a5aec026916ba1bb3e43f1e8e519b5a6cd6984b77dbe308312798fd0673 Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.228677 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-74279369607a56b6ea8035a12ab1e342f1fb7848641926ae2f0b91976d9514d3 WatchSource:0}: Error finding container 74279369607a56b6ea8035a12ab1e342f1fb7848641926ae2f0b91976d9514d3: Status 404 returned error can't find the container with id 74279369607a56b6ea8035a12ab1e342f1fb7848641926ae2f0b91976d9514d3 Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.235753 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-eba62025986a9293f4e16c9d4db715ac62f950b5ed546d1f83f67fe9cc4512eb WatchSource:0}: Error finding container eba62025986a9293f4e16c9d4db715ac62f950b5ed546d1f83f67fe9cc4512eb: Status 404 returned error can't find the container with id eba62025986a9293f4e16c9d4db715ac62f950b5ed546d1f83f67fe9cc4512eb Sep 29 12:40:14 crc kubenswrapper[4611]: E0929 12:40:14.292267 4611 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.74:6443: connect: connection refused" interval="800ms" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.511834 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.512848 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.512898 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.512911 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.512938 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 12:40:14 crc kubenswrapper[4611]: E0929 12:40:14.513280 4611 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
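[Annotation, not part of the captured log] The repeated "connection refused" errors above are the kubelet retrying node registration and lease creation against api-int.crc.testing:6443 before the static kube-apiserver pod on this node is serving. A minimal, stdlib-only Go sketch of the same reachability check; the endpoint comes from the log, while the /readyz path, the 800ms retry interval, and InsecureSkipVerify are illustrative assumptions, not kubelet code:

    // probe_apiserver.go - poll the API server endpoint the kubelet is retrying
    // above until it answers; illustrative sketch only.
    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{
            Timeout: 5 * time.Second,
            Transport: &http.Transport{
                // Assumption: no client certs configured for this sketch.
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }
        for {
            resp, err := client.Get("https://api-int.crc.testing:6443/readyz")
            if err != nil {
                fmt.Println("not ready:", err) // e.g. "connect: connection refused" as in the log
            } else {
                fmt.Println("status:", resp.Status)
                resp.Body.Close()
                if resp.StatusCode == http.StatusOK {
                    return
                }
            }
            time.Sleep(800 * time.Millisecond) // mirrors the initial lease retry interval above
        }
    }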
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.74:6443: connect: connection refused" node="crc" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.674734 4611 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.687535 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:14 crc kubenswrapper[4611]: E0929 12:40:14.687646 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.738897 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eba62025986a9293f4e16c9d4db715ac62f950b5ed546d1f83f67fe9cc4512eb"} Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.739591 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"74279369607a56b6ea8035a12ab1e342f1fb7848641926ae2f0b91976d9514d3"} Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.740341 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"21040a5aec026916ba1bb3e43f1e8e519b5a6cd6984b77dbe308312798fd0673"} Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.742037 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:14 crc kubenswrapper[4611]: E0929 12:40:14.742108 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.742705 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"04dd2f43d9c263a13ef7f42f035687ccbaf39117dd2c058209039780d91468c2"} Sep 29 12:40:14 crc kubenswrapper[4611]: I0929 12:40:14.743651 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f1c0ec3789a289b38f085aca248f9663de7bf0160f8e3f306df592bca53e171d"} Sep 29 12:40:14 crc kubenswrapper[4611]: W0929 12:40:14.798090 
4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:14 crc kubenswrapper[4611]: E0929 12:40:14.798171 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:15 crc kubenswrapper[4611]: E0929 12:40:15.093681 4611 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.74:6443: connect: connection refused" interval="1.6s" Sep 29 12:40:15 crc kubenswrapper[4611]: W0929 12:40:15.211284 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:15 crc kubenswrapper[4611]: E0929 12:40:15.211692 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.314070 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.316139 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.316186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.316196 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.316219 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 12:40:15 crc kubenswrapper[4611]: E0929 12:40:15.316726 4611 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.74:6443: connect: connection refused" node="crc" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.674786 4611 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.747131 4611 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be" exitCode=0 Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.747180 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
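[Annotation, not part of the captured log] Each reflector warning/error pair above is client-go's informer machinery failing its initial LIST (reflector.go:561) and routing the error through the unhandled-error logger (reflector.go:158); it backs off and retries until the API server answers. A hypothetical client-go sketch that issues the same CSIDriver LIST the reflector is retrying (the kubeconfig path is a placeholder):

    // list_csidrivers.go - illustrative client-go LIST, the operation behind
    // the reflector warnings above; kubeconfig path is hypothetical.
    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        config, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
        if err != nil {
            panic(err)
        }
        clientset, err := kubernetes.NewForConfig(config)
        if err != nil {
            panic(err)
        }
        // Same GET the reflector issues: /apis/storage.k8s.io/v1/csidrivers?limit=500
        drivers, err := clientset.StorageV1().CSIDrivers().List(context.TODO(), metav1.ListOptions{Limit: 500})
        if err != nil {
            fmt.Println("list failed:", err) // "connection refused" while the apiserver is down
            return
        }
        fmt.Println("csidrivers:", len(drivers.Items))
    }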
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.747219 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.748256 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.748282 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.748291 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.749125 4611 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301" exitCode=0 Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.749165 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.749229 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.749776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.749803 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.749813 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.750935 4611 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28" exitCode=0 Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.750988 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.751140 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.752693 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.752723 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.752735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.754181 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.754204 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.754216 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.754230 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.754237 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.754986 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.755018 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.755029 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.755793 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.756543 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.756670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.756665 4611 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316" exitCode=0 Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.756681 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.756699 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316"} Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.756825 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.759795 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.759825 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:40:15 crc kubenswrapper[4611]: I0929 12:40:15.759835 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.674494 4611 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:16 crc kubenswrapper[4611]: E0929 12:40:16.694963 4611 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.74:6443: connect: connection refused" interval="3.2s" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.762161 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.762209 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.762218 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.762227 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.763688 4611 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1" exitCode=0 Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.763725 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.763830 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.764533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.764569 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.764581 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.765995 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.766036 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.767131 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.767161 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.767172 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.768936 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.768977 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.768987 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.768991 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260"} Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.768953 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.769810 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.769841 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.769851 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.770169 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.770201 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.770212 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.916908 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.917924 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 
12:40:16.917974 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.917987 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:16 crc kubenswrapper[4611]: I0929 12:40:16.918018 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 12:40:16 crc kubenswrapper[4611]: E0929 12:40:16.918505 4611 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.74:6443: connect: connection refused" node="crc" Sep 29 12:40:17 crc kubenswrapper[4611]: W0929 12:40:17.075589 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.74:6443: connect: connection refused Sep 29 12:40:17 crc kubenswrapper[4611]: E0929 12:40:17.075710 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.74:6443: connect: connection refused" logger="UnhandledError" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.776132 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.776121 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9"} Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.777083 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.777121 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.777130 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.779248 4611 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237" exitCode=0 Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.779302 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237"} Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.779347 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.779695 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.779737 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.779893 4611 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.780489 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.780520 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.780533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.781168 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.781197 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.781209 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.781383 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.781477 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:17 crc kubenswrapper[4611]: I0929 12:40:17.781537 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.784687 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d"} Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.784737 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.784746 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748"} Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.784760 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314"} Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.784778 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.785647 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.785676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:18 crc kubenswrapper[4611]: I0929 12:40:18.785687 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.372281 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:19 crc kubenswrapper[4611]: 
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.373334 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.373365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.373373 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.377493 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.789853 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.789912 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.789835 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3"}
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790341 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a"}
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790645 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790665 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790804 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790852 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.790866 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:19 crc kubenswrapper[4611]: I0929 12:40:19.827549 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.119366 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.121069 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.121128 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.121144 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.121218 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.222737 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.222925 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.223818 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.223854 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.223862 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.306140 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.342416 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.342552 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.342589 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.343645 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.343670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.343679 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.791682 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.791681 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.791846 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.792470 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.792501 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.792512 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.792584 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.792604 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.792614 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:20 crc kubenswrapper[4611]: I0929 12:40:20.959714 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.122586 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.122778 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.122814 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.123816 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.123861 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.123880 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.530745 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.794885 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.794918 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.796712 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.796740 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.796750 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.797046 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.797077 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:21 crc kubenswrapper[4611]: I0929 12:40:21.797094 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.781297 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.781508 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.782614 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.782726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.782737 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.797480 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.798605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.798689 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:22 crc kubenswrapper[4611]: I0929 12:40:22.798699 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.216058 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.216351 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.218031 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.218077 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.218089 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:23 crc kubenswrapper[4611]: E0929 12:40:23.813852 4611 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.960220 4611 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 12:40:23 crc kubenswrapper[4611]: I0929 12:40:23.960306 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 12:40:27 crc kubenswrapper[4611]: W0929 12:40:27.579503 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.579608 4611 trace.go:236] Trace[1496075721]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 12:40:17.578) (total time: 10001ms):
Sep 29 12:40:27 crc kubenswrapper[4611]: Trace[1496075721]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (12:40:27.579)
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (12:40:27.579) Sep 29 12:40:27 crc kubenswrapper[4611]: Trace[1496075721]: [10.00138792s] [10.00138792s] END Sep 29 12:40:27 crc kubenswrapper[4611]: E0929 12:40:27.579646 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 12:40:27 crc kubenswrapper[4611]: W0929 12:40:27.618556 4611 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.618697 4611 trace.go:236] Trace[1053433938]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 12:40:17.617) (total time: 10001ms): Sep 29 12:40:27 crc kubenswrapper[4611]: Trace[1053433938]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (12:40:27.618) Sep 29 12:40:27 crc kubenswrapper[4611]: Trace[1053433938]: [10.001251235s] [10.001251235s] END Sep 29 12:40:27 crc kubenswrapper[4611]: E0929 12:40:27.618733 4611 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.680319 4611 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.680369 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.687614 4611 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.687704 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.810868 4611 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.812985 4611 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9" exitCode=255 Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.813032 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9"} Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.813179 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.814268 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.814514 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.814528 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:27 crc kubenswrapper[4611]: I0929 12:40:27.815132 4611 scope.go:117] "RemoveContainer" containerID="667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9" Sep 29 12:40:28 crc kubenswrapper[4611]: I0929 12:40:28.817266 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 12:40:28 crc kubenswrapper[4611]: I0929 12:40:28.819349 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82"} Sep 29 12:40:28 crc kubenswrapper[4611]: I0929 12:40:28.819557 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:28 crc kubenswrapper[4611]: I0929 12:40:28.820764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:28 crc kubenswrapper[4611]: I0929 12:40:28.820811 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:28 crc kubenswrapper[4611]: I0929 12:40:28.820821 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.128272 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.128796 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.128945 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.129845 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:31 
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.129891 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.133125 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.360555 4611 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.536551 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.536786 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.538160 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.538207 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.538219 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.825525 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.826468 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.826516 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:31 crc kubenswrapper[4611]: I0929 12:40:31.826531 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.155611 4611 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.665024 4611 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.666567 4611 trace.go:236] Trace[670149891]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 12:40:20.805) (total time: 11860ms):
Sep 29 12:40:32 crc kubenswrapper[4611]: Trace[670149891]: ---"Objects listed" error: 11860ms (12:40:32.666)
Sep 29 12:40:32 crc kubenswrapper[4611]: Trace[670149891]: [11.860582006s] [11.860582006s] END
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.666594 4611 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.666972 4611 trace.go:236] Trace[1961797829]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 12:40:17.971) (total time: 14695ms):
Sep 29 12:40:32 crc kubenswrapper[4611]: Trace[1961797829]: ---"Objects listed" error: 14695ms (12:40:32.666)
Sep 29 12:40:32 crc kubenswrapper[4611]: Trace[1961797829]: [14.695090311s] [14.695090311s] END
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.666984 4611 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.668201 4611 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.669706 4611 apiserver.go:52] "Watching apiserver"
Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.693300 4611 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.709713 4611 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.709989 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.710387 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.710459 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.710482 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.710593 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.710714 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.710788 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.710858 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
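[Annotation, not part of the captured log] The "Error syncing pod" entries above all share one cause: no CNI configuration has been written to /etc/kubernetes/cni/net.d/ yet, so sandboxes for the network pods cannot be created. A trivial Go check for the same condition the runtime is reporting; the directory path is copied from the log:

    // check_cni_conf.go - list CNI config files in the directory named in the
    // "NetworkPluginNotReady" errors above; an empty directory reproduces them.
    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        const dir = "/etc/kubernetes/cni/net.d/"
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        if len(entries) == 0 {
            fmt.Println("no CNI configuration file in", dir, "- network plugin not ready")
            return
        }
        for _, e := range entries {
            fmt.Println("found:", e.Name())
        }
    }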
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.711100 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.711148 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.713294 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.713727 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.713828 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.714033 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.714345 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.714392 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.714348 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.714582 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.717590 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.738993 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.741781 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.750330 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.755495 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.756382 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.766856 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.776166 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.776467 4611 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.787840 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.799429 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.812972 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.822458 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.835218 4611 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870033 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870089 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870111 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870138 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870171 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870195 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870220 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870245 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870295 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870322 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870344 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870364 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870386 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870411 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870433 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870456 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870479 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870509 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870535 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870556 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870552 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870578 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870637 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870667 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870653 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870693 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870785 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870814 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870837 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870861 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870882 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870908 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870933 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870956 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870976 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.870997 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871018 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871041 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871070 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871069 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871096 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871122 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871177 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871196 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871217 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871242 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871263 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871264 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871288 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871336 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871360 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871380 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871399 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871420 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871415 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871440 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871487 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871505 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871542 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871573 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871599 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871616 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871608 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871666 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871690 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871689 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871706 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871714 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871752 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871793 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871812 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871827 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871850 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871873 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871874 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871897 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871949 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871977 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.871984 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872004 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872030 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872054 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872081 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872087 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872111 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872111 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872125 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872126 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872138 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872165 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872190 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872215 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872245 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872273 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872276 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872279 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872296 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872321 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872345 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872368 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872390 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872414 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872439 4611 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872463 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872487 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872543 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872569 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872591 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872652 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872685 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872711 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872734 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872758 4611 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872780 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872806 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872829 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872854 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872874 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872895 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872920 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872942 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872966 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872992 4611 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873018 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873041 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873063 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873093 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873116 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873141 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873166 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873192 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873216 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " 
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873239 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873261 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873279 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873302 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873322 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873345 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873370 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873396 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873423 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873450 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: 
I0929 12:40:32.873474 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873521 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873546 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873569 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873597 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873637 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873662 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873691 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873717 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873741 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: 
\"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873764 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873788 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873814 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873927 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873957 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873982 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874011 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874037 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874077 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874107 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874137 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874165 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874190 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874217 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874241 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874269 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874293 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872294 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872418 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872489 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872497 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872552 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874375 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872797 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872876 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872939 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872937 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872950 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872997 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873117 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873132 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873609 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.873853 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874000 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874041 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874208 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874302 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.872775 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874643 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.875204 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.875805 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.875952 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.876448 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.876473 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.876603 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.876916 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877044 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877088 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877360 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877405 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). 
InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877456 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877745 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.877938 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.878938 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.880530 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:40:33.380508903 +0000 UTC m=+20.272028509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.880640 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.880692 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.880924 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.880958 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.881630 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.874314 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.881869 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882255 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882490 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882482 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882636 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882853 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882890 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.882874 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.883096 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.883225 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.883281 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.883584 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.884074 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.884104 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.884451 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.884525 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.884738 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.884754 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.885347 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.885404 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889562 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889711 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889810 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889835 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889867 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889962 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889969 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889977 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890028 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.889987 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890179 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890340 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890633 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890874 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890886 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890755 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.890825 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.891187 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.891296 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.891465 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.892654 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.892762 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.893133 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.893242 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.893485 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.893696 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.893586 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894158 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894192 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894216 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894237 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894256 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894277 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894348 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894368 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894388 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894405 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894422 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894441 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894457 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894473 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894491 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894507 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894524 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894542 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894559 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894577 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894595 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894611 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894645 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894665 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894685 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894702 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894719 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894737 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894754 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894771 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894788 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894803 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894822 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894845 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894862 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894879 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894896 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894913 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894931 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894949 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894967 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894985 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895004 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895020 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895035 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895087 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895108 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895126 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895145 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895165 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895191 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895210 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895246 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895274 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895296 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895315 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895336 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895355 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895373 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895503 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895514 4611 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895523 4611 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895533 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895542 4611 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895552 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895561 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895569 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895578 4611 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895589 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895598 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895607 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895616 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895639 4611 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895649 4611 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895658 4611 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895668 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895677 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895687 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895696 4611 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895705 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895714 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895723 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895733 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895742 4611 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895751 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895786 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895796 4611 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895806 4611 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895815 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895824 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895833 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895843 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895851 4611 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895861 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895870 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895879 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895888 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895897 4611 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895906 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895915 4611 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895924 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895934 4611 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895943 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895954 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895964 4611 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895972 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895982 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895991 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895999 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896008 4611 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896017 4611 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896027 4611 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896036 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896044 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896054 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896062 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896070 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896079 4611 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896087 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896096 4611 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896106 4611 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896115 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896125 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896134 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896142 4611 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896151 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896161 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896169 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896178 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896211 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896221 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896230 4611 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896239 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896248 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896257 4611 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896267 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896278 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896290 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896302 4611 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896312 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896321 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896330 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896340 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896350 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896360 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896370 4611 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896379 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896387 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896402 4611 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896411 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896420 4611 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896428 4611 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896439 4611 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896449 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896462 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896470 4611 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896480 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896489 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896498 4611 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896507 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896515 4611 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896524 4611 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896532 4611 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896541 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896549 4611 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896557 4611 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896566 4611 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896575 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894681 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898062 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.894944 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895024 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895192 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895251 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895396 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895828 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895832 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.895899 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896273 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896334 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896365 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896396 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.896718 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896789 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896847 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896957 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.896997 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897120 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897201 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897414 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897591 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897452 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897688 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.897954 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898003 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898282 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898380 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898606 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898690 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898775 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898966 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899002 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.898994 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899068 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899090 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899108 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899191 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899292 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899298 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899379 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.899565 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:33.399517574 +0000 UTC m=+20.291037280 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899593 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899676 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899717 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.899955 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.900046 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.900272 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.900308 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.900672 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.901684 4611 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.903095 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.903238 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.906014 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:33.403269363 +0000 UTC m=+20.294788969 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.907301 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.907690 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.907841 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.908044 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.908142 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.908335 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.908696 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.908784 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.909034 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.909172 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.909808 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.909955 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.910638 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.910840 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.911220 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.911425 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.912387 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.912772 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.912802 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.912817 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.912932 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:33.412879577 +0000 UTC m=+20.304399293 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.913054 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.916203 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.916691 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.917988 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.918907 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.920895 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.920950 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.922451 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.923380 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.924944 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.925167 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.927746 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.927776 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.927790 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:32 crc kubenswrapper[4611]: E0929 12:40:32.927854 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:33.427824809 +0000 UTC m=+20.319344415 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.928393 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.931249 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.933169 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.933316 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.933380 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.933517 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.933698 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.934000 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.934069 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.935135 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.935437 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.936215 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.941288 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.945870 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.956223 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.958930 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.997562 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.997840 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998027 4611 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998106 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998180 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998246 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998305 4611 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998370 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998433 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998491 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998546 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998603 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998707 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998792 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998935 4611 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999035 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.997594 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.998031 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999378 4611 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999449 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999508 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999560 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999614 4611 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999685 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: 
\"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999750 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999810 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999866 4611 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999918 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:32 crc kubenswrapper[4611]: I0929 12:40:32.999983 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000053 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000127 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000204 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000262 4611 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000312 4611 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000368 4611 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000505 4611 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000574 4611 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000644 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000710 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000765 4611 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000822 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000883 4611 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000939 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.000994 4611 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001050 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001102 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001156 4611 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001207 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001258 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc 
kubenswrapper[4611]: I0929 12:40:33.001310 4611 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001378 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001644 4611 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001782 4611 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001860 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.001931 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002009 4611 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002089 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002172 4611 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002255 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002339 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002403 4611 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002479 4611 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 
12:40:33.002551 4611 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002701 4611 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002767 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002830 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002892 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.002958 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003029 4611 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003099 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003180 4611 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003252 4611 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003313 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003366 4611 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003458 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 
12:40:33.003615 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003695 4611 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003746 4611 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003795 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003854 4611 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003914 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.003971 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004025 4611 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004076 4611 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004126 4611 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004196 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004277 4611 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004357 4611 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.004432 4611 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.021400 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.028802 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 12:40:33 crc kubenswrapper[4611]: W0929 12:40:33.032656 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-30ab7fbfbcdc4adfcbaa5542484663ae0e7a2c2f217c1d0d87d2c68d1a383342 WatchSource:0}: Error finding container 30ab7fbfbcdc4adfcbaa5542484663ae0e7a2c2f217c1d0d87d2c68d1a383342: Status 404 returned error can't find the container with id 30ab7fbfbcdc4adfcbaa5542484663ae0e7a2c2f217c1d0d87d2c68d1a383342 Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.034795 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 12:40:33 crc kubenswrapper[4611]: W0929 12:40:33.041036 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-ceb149eaa3daa540dd6be78977660686f58eb30ff039f07d1b6f020858a723fe WatchSource:0}: Error finding container ceb149eaa3daa540dd6be78977660686f58eb30ff039f07d1b6f020858a723fe: Status 404 returned error can't find the container with id ceb149eaa3daa540dd6be78977660686f58eb30ff039f07d1b6f020858a723fe Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.257562 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.270666 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.274271 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.280234 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.289731 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.300942 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.311089 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.331021 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.341445 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.352000 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.378644 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\
\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.406769 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.406879 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.406906 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.407018 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.407085 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" 
failed. No retries permitted until 2025-09-29 12:40:34.407068013 +0000 UTC m=+21.298587619 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.407226 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.407268 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:40:34.407242148 +0000 UTC m=+21.298761754 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.407297 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:34.40728881 +0000 UTC m=+21.298808416 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.408610 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26
702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.420600 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.430216 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\
\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.440681 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.452433 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.462517 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.474541 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.488679 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.497813 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.508127 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.508195 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508306 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508345 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508357 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508316 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508410 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:34.508394327 +0000 UTC m=+21.399913933 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508415 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508426 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:33 crc kubenswrapper[4611]: E0929 12:40:33.508473 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:34.508444788 +0000 UTC m=+21.399964394 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.742954 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.744117 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.746035 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.747746 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.748968 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.749661 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.750467 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.751438 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.752270 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.753516 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.754133 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.755255 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.755741 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.756242 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.757172 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.757748 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.759084 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.759580 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.760349 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.760602 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.761395 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.762909 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.764289 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.765006 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.766096 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.766741 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.768558 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.770060 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.770794 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.772391 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.772594 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.773516 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.774937 4611 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.775674 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.777203 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.778854 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.779336 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.780560 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.781361 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.782061 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" 
path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.782871 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.785191 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.785781 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.786375 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.787451 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.789151 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.790761 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.791432 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.792835 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.794274 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.796074 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.796752 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.798024 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.798814 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" 
path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.799573 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.799686 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.800718 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.801330 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.817548 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.831429 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b"} Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.831528 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"470ee9d33343cf0bafe1d4c8161421d2c7fbb2ddd6709366300de18673c63c2e"} Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.832446 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ceb149eaa3daa540dd6be78977660686f58eb30ff039f07d1b6f020858a723fe"} Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.834259 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94"} Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.834319 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"30ab7fbfbcdc4adfcbaa5542484663ae0e7a2c2f217c1d0d87d2c68d1a383342"} Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.834682 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4c
f1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.846705 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.860768 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:33 crc kubenswrapper[4611]: I0929 12:40:33.877989 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a2699
9316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.420942 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.421053 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.421120 4611 configmap.go:193] Couldn't get configMap 
openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.421139 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:40:36.421099956 +0000 UTC m=+23.312619602 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.421185 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:36.421166768 +0000 UTC m=+23.312686414 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.421257 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.421498 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.421602 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:36.421579901 +0000 UTC m=+23.313099547 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.522532 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.522581 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522748 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522780 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522795 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522749 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522870 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522883 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.522847 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:36.522830173 +0000 UTC m=+23.414349789 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.523026 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:36.522974577 +0000 UTC m=+23.414494193 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.735509 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.735556 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.736207 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.735554 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.736451 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:34 crc kubenswrapper[4611]: E0929 12:40:34.736257 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.838315 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce"} Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.851963 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0
a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.861800 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.877551 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a2699
9316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.888213 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.897569 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.909238 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.918354 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.930933 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.941648 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.954863 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.966158 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.976847 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.986652 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:34 crc kubenswrapper[4611]: I0929 12:40:34.995083 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.010485 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd30
3a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.020243 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.029971 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.042989 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.846542 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc"} Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.862543 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.875740 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.890553 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.904401 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.917245 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.935801 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.952393 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.964262 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:35 crc kubenswrapper[4611]: I0929 12:40:35.978913 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:35Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.438989 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.439156 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:40:40.439123824 +0000 UTC m=+27.330643430 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.439234 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.439266 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.439402 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.439421 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.439467 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:40.439451294 +0000 UTC m=+27.330970900 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.439581 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:40.439522916 +0000 UTC m=+27.331042602 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.540073 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.540392 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.540263 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.540597 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.540718 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.540870 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:40.54084081 +0000 UTC m=+27.432360416 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.540491 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.541049 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.541123 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.541225 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:40.541212362 +0000 UTC m=+27.432731978 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.735936 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.735986 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:36 crc kubenswrapper[4611]: I0929 12:40:36.736046 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.736058 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.736311 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:36 crc kubenswrapper[4611]: E0929 12:40:36.736389 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.671779 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-d7d72"] Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.672361 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-d2gnq"] Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.672571 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.672666 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-csch6"] Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.672800 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.672935 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.674583 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-kf52c"] Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.674899 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.678317 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.678762 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.678970 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.678999 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.679133 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.679189 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.679186 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.679607 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.679684 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.685370 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.685404 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.685580 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.685687 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.685746 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.685760 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.702240 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.728642 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\
"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29
T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.735708 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.735786 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.735708 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:38 crc kubenswrapper[4611]: E0929 12:40:38.735927 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:38 crc kubenswrapper[4611]: E0929 12:40:38.735808 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:38 crc kubenswrapper[4611]: E0929 12:40:38.736088 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.744207 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.756575 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757796 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-system-cni-dir\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757830 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-system-cni-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757850 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-daemon-config\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757864 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-conf-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 
crc kubenswrapper[4611]: I0929 12:40:38.757879 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-tuning-conf-dir\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757894 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-socket-dir-parent\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757914 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-cni-multus\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757928 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-os-release\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.757943 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-k8s-cni-cncf-io\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758000 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-cni-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758079 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svqc7\" (UniqueName: \"kubernetes.io/projected/9fea0777-8bbe-4100-806a-2580c80c902c-kube-api-access-svqc7\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758110 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/18731b4e-6360-4d87-b586-0a9dc6b5af1e-cni-binary-copy\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758135 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2ffe5254-1d23-44e3-8d04-5da256598928-hosts-file\") pod \"node-resolver-kf52c\" 
(UID: \"2ffe5254-1d23-44e3-8d04-5da256598928\") " pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758156 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9fea0777-8bbe-4100-806a-2580c80c902c-rootfs\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758181 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/457d9143-5ef6-484f-8ff9-31d671abb269-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758206 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-multus-certs\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758228 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-hostroot\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758247 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/457d9143-5ef6-484f-8ff9-31d671abb269-cni-binary-copy\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758267 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9fea0777-8bbe-4100-806a-2580c80c902c-mcd-auth-proxy-config\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758292 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-etc-kubernetes\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758315 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dxj8\" (UniqueName: \"kubernetes.io/projected/18731b4e-6360-4d87-b586-0a9dc6b5af1e-kube-api-access-5dxj8\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758334 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-cni-bin\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758350 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-cnibin\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758368 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbtwh\" (UniqueName: \"kubernetes.io/projected/2ffe5254-1d23-44e3-8d04-5da256598928-kube-api-access-cbtwh\") pod \"node-resolver-kf52c\" (UID: \"2ffe5254-1d23-44e3-8d04-5da256598928\") " pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758387 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9fea0777-8bbe-4100-806a-2580c80c902c-proxy-tls\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758434 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-os-release\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758452 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-netns\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758468 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-kubelet\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758489 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-cnibin\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.758523 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dzdp\" (UniqueName: \"kubernetes.io/projected/457d9143-5ef6-484f-8ff9-31d671abb269-kube-api-access-9dzdp\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.770340 4611 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.785200 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.802770 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.817201 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.831071 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.843287 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.856857 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859086 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-tuning-conf-dir\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859128 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-socket-dir-parent\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859158 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: 
\"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-cni-multus\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859179 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-os-release\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859199 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-k8s-cni-cncf-io\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859229 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-cni-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859253 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svqc7\" (UniqueName: \"kubernetes.io/projected/9fea0777-8bbe-4100-806a-2580c80c902c-kube-api-access-svqc7\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859274 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/18731b4e-6360-4d87-b586-0a9dc6b5af1e-cni-binary-copy\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859294 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2ffe5254-1d23-44e3-8d04-5da256598928-hosts-file\") pod \"node-resolver-kf52c\" (UID: \"2ffe5254-1d23-44e3-8d04-5da256598928\") " pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859315 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9fea0777-8bbe-4100-806a-2580c80c902c-rootfs\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859339 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/457d9143-5ef6-484f-8ff9-31d671abb269-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859360 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-multus-certs\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859354 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-k8s-cni-cncf-io\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859381 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-hostroot\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859402 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/457d9143-5ef6-484f-8ff9-31d671abb269-cni-binary-copy\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859428 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9fea0777-8bbe-4100-806a-2580c80c902c-mcd-auth-proxy-config\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859452 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-etc-kubernetes\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859473 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dxj8\" (UniqueName: \"kubernetes.io/projected/18731b4e-6360-4d87-b586-0a9dc6b5af1e-kube-api-access-5dxj8\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859481 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-socket-dir-parent\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859494 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-cni-bin\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859536 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-cni-bin\") pod \"multus-csch6\" (UID: 
\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859568 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-cnibin\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859599 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbtwh\" (UniqueName: \"kubernetes.io/projected/2ffe5254-1d23-44e3-8d04-5da256598928-kube-api-access-cbtwh\") pod \"node-resolver-kf52c\" (UID: \"2ffe5254-1d23-44e3-8d04-5da256598928\") " pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859636 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9fea0777-8bbe-4100-806a-2580c80c902c-proxy-tls\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859667 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-os-release\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859679 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-os-release\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859717 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-netns\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859731 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-os-release\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859740 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-kubelet\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859759 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-cnibin\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859777 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-cnibin\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859779 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dzdp\" (UniqueName: \"kubernetes.io/projected/457d9143-5ef6-484f-8ff9-31d671abb269-kube-api-access-9dzdp\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859837 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-system-cni-dir\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859858 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-system-cni-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859875 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-cni-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859912 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-netns\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859943 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-kubelet\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859971 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-cnibin\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860257 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-system-cni-dir\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860303 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-system-cni-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860334 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-hostroot\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860365 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9fea0777-8bbe-4100-806a-2580c80c902c-rootfs\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860478 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/18731b4e-6360-4d87-b586-0a9dc6b5af1e-cni-binary-copy\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860526 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-daemon-config\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.859878 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-daemon-config\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860578 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-conf-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860654 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-multus-conf-dir\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860768 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-var-lib-cni-multus\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860533 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-host-run-multus-certs\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.860849 
4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/18731b4e-6360-4d87-b586-0a9dc6b5af1e-etc-kubernetes\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.861013 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2ffe5254-1d23-44e3-8d04-5da256598928-hosts-file\") pod \"node-resolver-kf52c\" (UID: \"2ffe5254-1d23-44e3-8d04-5da256598928\") " pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.861015 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/457d9143-5ef6-484f-8ff9-31d671abb269-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.861141 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/457d9143-5ef6-484f-8ff9-31d671abb269-tuning-conf-dir\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.861282 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9fea0777-8bbe-4100-806a-2580c80c902c-mcd-auth-proxy-config\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.861322 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/457d9143-5ef6-484f-8ff9-31d671abb269-cni-binary-copy\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.864592 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9fea0777-8bbe-4100-806a-2580c80c902c-proxy-tls\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.889370 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.891458 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dzdp\" (UniqueName: \"kubernetes.io/projected/457d9143-5ef6-484f-8ff9-31d671abb269-kube-api-access-9dzdp\") pod \"multus-additional-cni-plugins-d7d72\" (UID: \"457d9143-5ef6-484f-8ff9-31d671abb269\") " pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.892183 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svqc7\" (UniqueName: \"kubernetes.io/projected/9fea0777-8bbe-4100-806a-2580c80c902c-kube-api-access-svqc7\") pod \"machine-config-daemon-d2gnq\" (UID: \"9fea0777-8bbe-4100-806a-2580c80c902c\") " pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.907436 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbtwh\" (UniqueName: \"kubernetes.io/projected/2ffe5254-1d23-44e3-8d04-5da256598928-kube-api-access-cbtwh\") pod \"node-resolver-kf52c\" (UID: \"2ffe5254-1d23-44e3-8d04-5da256598928\") " pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.913166 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dxj8\" (UniqueName: \"kubernetes.io/projected/18731b4e-6360-4d87-b586-0a9dc6b5af1e-kube-api-access-5dxj8\") pod \"multus-csch6\" (UID: \"18731b4e-6360-4d87-b586-0a9dc6b5af1e\") " pod="openshift-multus/multus-csch6" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.925104 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\
\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.938214 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.958426 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.971568 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.982190 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.989940 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-d7d72" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.996404 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:38Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:38 crc kubenswrapper[4611]: I0929 12:40:38.996606 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:40:39 crc kubenswrapper[4611]: W0929 12:40:39.006747 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fea0777_8bbe_4100_806a_2580c80c902c.slice/crio-259470f254635c354fc70f779c376ea5bff1e18c026defe7262925eeeb3c9f4e WatchSource:0}: Error finding container 259470f254635c354fc70f779c376ea5bff1e18c026defe7262925eeeb3c9f4e: Status 404 returned error can't find the container with id 259470f254635c354fc70f779c376ea5bff1e18c026defe7262925eeeb3c9f4e Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.007310 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-csch6" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.009772 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc 
kubenswrapper[4611]: I0929 12:40:39.013383 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kf52c" Sep 29 12:40:39 crc kubenswrapper[4611]: W0929 12:40:39.023635 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18731b4e_6360_4d87_b586_0a9dc6b5af1e.slice/crio-f1ec7db81d6e9703e2f7b171db6b498a78e3fcf694e51a6a0404c95a48be7c13 WatchSource:0}: Error finding container f1ec7db81d6e9703e2f7b171db6b498a78e3fcf694e51a6a0404c95a48be7c13: Status 404 returned error can't find the container with id f1ec7db81d6e9703e2f7b171db6b498a78e3fcf694e51a6a0404c95a48be7c13 Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.034410 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"202
5-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: W0929 12:40:39.042198 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ffe5254_1d23_44e3_8d04_5da256598928.slice/crio-944fee7cdfbdbe3db4e422affff3614afc83823fec6e28d3598e93af009fb612 WatchSource:0}: Error finding container 944fee7cdfbdbe3db4e422affff3614afc83823fec6e28d3598e93af009fb612: Status 404 returned error can't find the container with id 944fee7cdfbdbe3db4e422affff3614afc83823fec6e28d3598e93af009fb612 Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.054096 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p95nv"] Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.054857 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.055242 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286
f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060072 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060272 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060315 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060500 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060580 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060603 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.060762 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.072354 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.088902 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.093693 4611 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.095975 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.095996 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.096004 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.096074 4611 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.110473 4611 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.110736 4611 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.111505 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c
25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.111807 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.111851 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.111862 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.111879 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.111890 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.123316 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: E0929 12:40:39.129690 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.133619 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.133658 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.133668 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.133682 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.133692 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.135836 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: E0929 12:40:39.146356 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.150578 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.151654 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.151681 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.151689 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.151704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.151713 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163690 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-netns\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163727 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-ovn\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163742 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-config\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163766 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-kubelet\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163780 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-script-lib\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163803 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-systemd-units\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163816 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163831 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-netd\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163843 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-env-overrides\") pod \"ovnkube-node-p95nv\" 
(UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163857 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-slash\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163874 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-bin\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163888 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2pdp\" (UniqueName: \"kubernetes.io/projected/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-kube-api-access-j2pdp\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163903 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-systemd\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163915 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-var-lib-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163928 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-etc-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163958 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovn-node-metrics-cert\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.163986 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-node-log\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.164009 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-log-socket\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.164029 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-ovn-kubernetes\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.165900 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.169349 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: E0929 12:40:39.172478 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.176357 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.176383 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.176393 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.176406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.176414 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.187178 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: E0929 12:40:39.190189 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.196938 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.196984 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.196996 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.197031 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.197043 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.201569 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: E0929 12:40:39.210349 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 
2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: E0929 12:40:39.210516 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.212596 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.212645 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.212654 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.212824 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.212841 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.221818 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.233280 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.252058 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.266507 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-kubelet\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.266691 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-script-lib\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.266809 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-env-overrides\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.266920 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-systemd-units\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267019 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267099 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-netd\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267196 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-slash\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267291 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-bin\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267372 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2pdp\" (UniqueName: \"kubernetes.io/projected/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-kube-api-access-j2pdp\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc 
kubenswrapper[4611]: I0929 12:40:39.267462 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovn-node-metrics-cert\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267560 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-systemd\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267683 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-var-lib-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267778 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-etc-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267852 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-netd\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267925 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-kubelet\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.267994 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-node-log\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268108 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-log-socket\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268216 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-ovn-kubernetes\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268309 4611 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268417 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-config\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268537 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-systemd-units\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268560 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-netns\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268760 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-ovn\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268845 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268964 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-systemd\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268963 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-var-lib-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269189 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-etc-openvswitch\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269051 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-node-log\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269075 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-log-socket\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269098 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-ovn-kubernetes\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269102 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-env-overrides\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269127 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269161 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-netns\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269176 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-slash\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.268811 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-script-lib\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269020 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-ovn\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.269666 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-config\") pod \"ovnkube-node-p95nv\" (UID: 
\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.270185 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-bin\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.275497 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.275966 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovn-node-metrics-cert\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.288539 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.291732 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2pdp\" (UniqueName: \"kubernetes.io/projected/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-kube-api-access-j2pdp\") pod \"ovnkube-node-p95nv\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.303942 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.314903 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.314929 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.314940 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.314953 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.314962 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.315334 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: 
I0929 12:40:39.404255 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:39 crc kubenswrapper[4611]: W0929 12:40:39.414290 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfec2820_7242_4dd6_9fa5_4ebe161f99ba.slice/crio-da67c5f3e62b4626745bbc5bc9f59d7e136368590f0ecd66fa748d21779fb1a0 WatchSource:0}: Error finding container da67c5f3e62b4626745bbc5bc9f59d7e136368590f0ecd66fa748d21779fb1a0: Status 404 returned error can't find the container with id da67c5f3e62b4626745bbc5bc9f59d7e136368590f0ecd66fa748d21779fb1a0 Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.416603 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.416651 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.416662 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.416676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.416686 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.519079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.519120 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.519143 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.519157 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.519168 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.621725 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.621764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.621775 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.621792 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.621803 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.723550 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.723585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.723594 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.723607 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.723616 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.825585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.825823 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.825890 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.825961 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.826024 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.857480 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerStarted","Data":"c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.857531 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerStarted","Data":"f1ec7db81d6e9703e2f7b171db6b498a78e3fcf694e51a6a0404c95a48be7c13"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.859645 4611 generic.go:334] "Generic (PLEG): container finished" podID="457d9143-5ef6-484f-8ff9-31d671abb269" containerID="44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11" exitCode=0 Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.859724 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerDied","Data":"44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.859877 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerStarted","Data":"acf69785fdfc043c1ce68938af3dbc450227f05e7405e382d6581ed34f962a2c"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.861331 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" exitCode=0 Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.861407 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.861432 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"da67c5f3e62b4626745bbc5bc9f59d7e136368590f0ecd66fa748d21779fb1a0"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.863269 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kf52c" event={"ID":"2ffe5254-1d23-44e3-8d04-5da256598928","Type":"ContainerStarted","Data":"eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.863299 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kf52c" event={"ID":"2ffe5254-1d23-44e3-8d04-5da256598928","Type":"ContainerStarted","Data":"944fee7cdfbdbe3db4e422affff3614afc83823fec6e28d3598e93af009fb612"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.864870 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.864901 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.864916 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"259470f254635c354fc70f779c376ea5bff1e18c026defe7262925eeeb3c9f4e"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.888789 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.905460 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.921014 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.928385 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.928422 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.928430 4611 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.928443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.928452 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:39Z","lastTransitionTime":"2025-09-29T12:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.935765 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\"
:\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.946370 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.964761 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.978642 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:39 crc kubenswrapper[4611]: I0929 12:40:39.992084 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:39Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.012007 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.025613 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.031233 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.031261 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.031272 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.031286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.031296 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.047332 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.071545 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.086783 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.105986 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 
2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.124052 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.134218 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.134264 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.134275 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.134291 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.134304 4611 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.146203 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.163241 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.186848 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.201289 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.216694 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.232954 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc 
kubenswrapper[4611]: I0929 12:40:40.236332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.236373 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.236386 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.236406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.236420 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.248366 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",
\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.258840 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.278533 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.298922 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.311026 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.323247 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.333982 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.338723 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.338755 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.338764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.338779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.338788 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.441037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.441072 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.441101 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.441117 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.441126 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.480637 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.480746 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.480774 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:40:48.480749476 +0000 UTC m=+35.372269082 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.480827 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.480897 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.480925 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:48.480909981 +0000 UTC m=+35.372429637 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.481011 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.481054 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:48.481045135 +0000 UTC m=+35.372564741 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.543690 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.543722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.543730 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.543744 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.543756 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.582379 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.582445 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582609 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582648 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582663 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582680 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582727 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582733 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:48.582711155 +0000 UTC m=+35.474230761 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582743 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.582825 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:40:48.582801927 +0000 UTC m=+35.474321753 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.646861 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.646902 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.646913 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.646930 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.646944 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.735836 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.735899 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.735906 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.736068 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.736171 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:40 crc kubenswrapper[4611]: E0929 12:40:40.736248 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.749991 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.750037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.750050 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.750071 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.750085 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.852455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.852494 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.852503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.852519 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.852529 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.869811 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerStarted","Data":"55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.872407 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.872443 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.872453 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.872462 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.897268 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.913295 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.926401 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.939188 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.953834 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.955591 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.955662 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.955673 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.955688 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.955698 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:40Z","lastTransitionTime":"2025-09-29T12:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.965563 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.985352 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z 
is after 2025-08-24T17:21:41Z" Sep 29 12:40:40 crc kubenswrapper[4611]: I0929 12:40:40.998196 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.008683 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.023353 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.036212 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.049049 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.058326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.058365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.058373 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.058389 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.058399 4611 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.063233 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.075568 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.162601 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.162673 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.162684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.162701 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.162737 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.270482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.271009 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.271024 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.271066 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.271079 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.277438 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-648vg"] Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.277900 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.281571 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.281765 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.285556 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.289309 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.302650 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.316320 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.326348 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.339079 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.349992 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.372257 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.373866 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.373886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.373896 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.373910 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.373920 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.386851 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.389718 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24cnr\" (UniqueName: \"kubernetes.io/projected/49de17a8-ece1-4707-9f9a-5c192e484b1e-kube-api-access-24cnr\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.389771 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/49de17a8-ece1-4707-9f9a-5c192e484b1e-host\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.389833 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/49de17a8-ece1-4707-9f9a-5c192e484b1e-serviceca\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.401706 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.415970 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.427372 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.441974 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.453969 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.473208 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.476090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.476124 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.476137 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.476153 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.476164 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.490398 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/49de17a8-ece1-4707-9f9a-5c192e484b1e-host\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.490457 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/49de17a8-ece1-4707-9f9a-5c192e484b1e-serviceca\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.490477 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24cnr\" (UniqueName: \"kubernetes.io/projected/49de17a8-ece1-4707-9f9a-5c192e484b1e-kube-api-access-24cnr\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.490701 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/49de17a8-ece1-4707-9f9a-5c192e484b1e-host\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.491863 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/49de17a8-ece1-4707-9f9a-5c192e484b1e-serviceca\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.521057 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.521451 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24cnr\" (UniqueName: \"kubernetes.io/projected/49de17a8-ece1-4707-9f9a-5c192e484b1e-kube-api-access-24cnr\") pod \"node-ca-648vg\" (UID: \"49de17a8-ece1-4707-9f9a-5c192e484b1e\") " pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.547805 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.578810 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.578862 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.578877 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.578895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.578909 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.605379 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-648vg" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.680610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.680657 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.680668 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.680684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.680695 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.783882 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.783935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.783953 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.783978 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.783996 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.879423 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.879487 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.881236 4611 generic.go:334] "Generic (PLEG): container finished" podID="457d9143-5ef6-484f-8ff9-31d671abb269" containerID="55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee" exitCode=0 Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.881316 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerDied","Data":"55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.884103 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-648vg" event={"ID":"49de17a8-ece1-4707-9f9a-5c192e484b1e","Type":"ContainerStarted","Data":"399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.884140 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-648vg" event={"ID":"49de17a8-ece1-4707-9f9a-5c192e484b1e","Type":"ContainerStarted","Data":"8de922148eb84bbc0315fbbbb5dc7370a88e80aab42d557e45d2850ce8a398bf"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.885275 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.885302 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.885316 4611 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.885330 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.885341 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.908374 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.926346 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.943920 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.959994 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.969498 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.981081 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.988329 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.988363 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.988377 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.988393 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.988404 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:41Z","lastTransitionTime":"2025-09-29T12:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:41 crc kubenswrapper[4611]: I0929 12:40:41.990082 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:41Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.011122 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z 
is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.023591 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.039905 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.055327 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.064505 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\"
:\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.077128 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.089424 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.090839 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.090867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.090876 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.090889 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.090898 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.100794 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.119238 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.133334 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.143953 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.155854 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.165910 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.176231 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.188068 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.192578 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.192604 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.192614 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.192678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.192689 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.199748 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.208070 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.224024 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.235010 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.246534 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.258175 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.283359 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.295097 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.295136 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.295147 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.295163 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.295175 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.323724 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.397348 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.397379 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.397387 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.397400 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.397409 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.499308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.499340 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.499349 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.499365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.499374 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.601610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.601692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.601704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.601726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.601739 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.703676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.703727 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.703745 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.703776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.703793 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.735493 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.735519 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:42 crc kubenswrapper[4611]: E0929 12:40:42.735601 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.735495 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:42 crc kubenswrapper[4611]: E0929 12:40:42.735737 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:42 crc kubenswrapper[4611]: E0929 12:40:42.735788 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.785411 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.797115 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.806000 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.806037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.806045 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.806060 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.806069 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.808717 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc 
kubenswrapper[4611]: I0929 12:40:42.818544 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.832022 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.846368 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.863091 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.874461 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\
"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.884452 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 
12:40:42.888798 4611 generic.go:334] "Generic (PLEG): container finished" podID="457d9143-5ef6-484f-8ff9-31d671abb269" containerID="59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b" exitCode=0 Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.888848 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerDied","Data":"59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.908032 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.908065 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.908076 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.908090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.908099 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:42Z","lastTransitionTime":"2025-09-29T12:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.912009 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z 
is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.922906 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.934200 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.944685 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.956542 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.981217 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:42 crc kubenswrapper[4611]: I0929 12:40:42.994459 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:42Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.009985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.010020 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.010030 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.010068 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.010077 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.012913 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.026368 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.040039 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.082180 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.112237 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 
12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.112271 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.112280 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.112294 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.112302 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.122351 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.163645 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.202492 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.214074 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.214102 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.214110 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.214126 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.214136 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.242595 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.281584 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.316596 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.316658 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.316669 4611 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.316686 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.316697 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.326915 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z 
is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.363602 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.403353 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.419757 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.419792 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.419830 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.419846 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.419856 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.444881 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.481418 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.522304 4611 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.522353 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.522368 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.522387 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.522398 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.524440 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.624280 4611 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.624324 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.624333 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.624350 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.624361 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.726382 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.726432 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.726447 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.726469 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.726482 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.748646 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.758928 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.767205 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.776583 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.793152 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.811049 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.822359 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.828667 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.828715 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.828728 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.828747 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.828761 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.846479 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.884053 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.896128 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.898550 4611 generic.go:334] "Generic (PLEG): container finished" podID="457d9143-5ef6-484f-8ff9-31d671abb269" containerID="b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490" exitCode=0 Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.898580 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerDied","Data":"b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490"} Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.926754 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 
Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.931490 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.931522 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.931533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.931547 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.931556 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:43Z","lastTransitionTime":"2025-09-29T12:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:43 crc kubenswrapper[4611]: I0929 12:40:43.964312 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:43Z is after 2025-08-24T17:21:41Z"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.002882 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.034126 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.034162 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.034170 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.034187 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.034196 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.042481 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z"
Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.088988 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.126321 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.136893 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.136938 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.136948 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.136960 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.136969 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.162733 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.203506 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.238781 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.238820 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.238829 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.238842 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.238853 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.246967 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.283755 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.328086 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403
b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.341260 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.341322 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.341338 
4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.341355 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.341579 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.384953 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d
6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.411502 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.444311 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.444375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.444384 4611 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.444397 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.444407 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.445340 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.481902 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.526500 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.546341 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.546383 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.546394 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.546411 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.546423 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.560743 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.605916 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z 
is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.643782 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.648564 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.648599 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.648610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.648642 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.648655 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.684923 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.726131 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.736355 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:44 crc kubenswrapper[4611]: E0929 12:40:44.736702 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.736754 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.736772 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:44 crc kubenswrapper[4611]: E0929 12:40:44.737106 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:44 crc kubenswrapper[4611]: E0929 12:40:44.737027 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.750687 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.750721 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.750734 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.750749 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.750760 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.852814 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.852858 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.852867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.852881 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.852891 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.904471 4611 generic.go:334] "Generic (PLEG): container finished" podID="457d9143-5ef6-484f-8ff9-31d671abb269" containerID="6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1" exitCode=0 Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.904520 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerDied","Data":"6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.917954 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv
qc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.938540 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.956554 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.956873 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.956921 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.956932 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.956948 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.956959 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:44Z","lastTransitionTime":"2025-09-29T12:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.972893 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:44 crc kubenswrapper[4611]: I0929 12:40:44.994058 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:44Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.009913 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba972
3e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.024744 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.043009 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.058607 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.058670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.058678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.058692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.058701 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.081499 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.124314 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.160490 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.160533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.160543 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.160556 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.160566 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.164492 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.207526 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z 
is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.245668 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.262666 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.262699 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.262708 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.262722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.262732 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.282137 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.324482 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.364437 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.364471 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.364482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.364497 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.364506 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.466508 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.467429 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.467501 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.467568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.467646 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.571090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.571142 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.571153 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.571171 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.571184 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.674327 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.674370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.674381 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.674400 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.674411 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.777223 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.777262 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.777271 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.777288 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.777299 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.880217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.880287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.880304 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.880334 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.880351 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.912605 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.912934 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.915301 4611 generic.go:334] "Generic (PLEG): container finished" podID="457d9143-5ef6-484f-8ff9-31d671abb269" containerID="7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885" exitCode=0 Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.915339 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerDied","Data":"7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.930287 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.938694 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.942966 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.957033 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.970221 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.983048 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.983114 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.983173 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.983190 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.983200 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:45Z","lastTransitionTime":"2025-09-29T12:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
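Every one of these status patches is being rejected for the same reason: the kubelet's POST to the network-node-identity validating webhook at https://127.0.0.1:9743/pod fails TLS verification because the webhook's serving certificate expired on 2025-08-24T17:21:41Z, more than a month before the node clock (2025-09-29T12:40:45Z). The error text is Go's crypto/x509 validity-window check verbatim. A minimal stdlib sketch of that same NotBefore/NotAfter comparison, assuming a hypothetical cert.pem holding the webhook serving certificate:

// checkcert.go: reproduce the x509 validity-window check that the
// kubelet's webhook client keeps failing in the entries above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("cert.pem") // hypothetical path to the webhook serving cert
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		// This is the branch every entry in this log is hitting.
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}

Until that certificate is rotated, every status patch attempt in this log fails the same way regardless of the pods' actual health.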
Has your network provider started?"} Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.984592 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:45 crc kubenswrapper[4611]: I0929 12:40:45.999020 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:45Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.015009 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
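The patch payload that follows, like the ones above, is hard to read because it is quoted twice before reaching the log: the kubelet formats the strategic-merge patch into the error message as a quoted string, and klog then quotes the whole message again, so each literal quote in the JSON appears as \\\" in the file. Two rounds of strconv.Unquote recover the readable JSON; a sketch using a trimmed fragment (real payloads run to several kilobytes):

// unescape.go: decode a doubly-quoted status-patch fragment from this log.
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Trimmed fragment copied from the iptables-alerter entry above.
	escaped := `"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"}}"`
	once, err := strconv.Unquote(escaped) // undo klog's outer quoting
	if err != nil {
		panic(err)
	}
	twice, err := strconv.Unquote(`"` + once + `"`) // undo the error message's inner quoting
	if err != nil {
		panic(err)
	}
	fmt.Println(twice) // {"metadata":{"uid":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49"}}
}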
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.025356 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.046821 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.061253 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.074242 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.085273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.085317 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.085326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.085342 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.085353 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.086692 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.099885 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.115047 4611 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27e
a9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.134013 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.155963 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9
127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.169543 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.180274 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.187291 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.187316 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.187324 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.187336 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.187345 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.191449 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.200539 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.211103 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.221736 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.245820 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.282618 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.289110 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.289158 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.289170 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.289187 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.289197 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.324293 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.367744 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234
a08505eff73b479b26a6a9b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.391492 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.391526 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.391535 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.391551 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.391561 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.404584 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.442775 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.485944 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z"
Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.493888 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.493919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.493928 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.493943 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.493953 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.523419 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.595834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.596152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.596218 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.596278 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.596332 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.699058 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.699278 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.699372 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.699471 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.699568 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.735586 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.735586 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:46 crc kubenswrapper[4611]: E0929 12:40:46.736016 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.736098 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:46 crc kubenswrapper[4611]: E0929 12:40:46.735931 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:46 crc kubenswrapper[4611]: E0929 12:40:46.736180 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.802240 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.802276 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.802286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.802301 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.802313 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.905286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.905331 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.905341 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.905358 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.905368 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:46Z","lastTransitionTime":"2025-09-29T12:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.921598 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.921722 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" event={"ID":"457d9143-5ef6-484f-8ff9-31d671abb269","Type":"ContainerStarted","Data":"ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5"} Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.921967 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.944142 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.947892 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.962270 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba972
3e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.974784 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.987018 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:46 crc kubenswrapper[4611]: I0929 12:40:46.998806 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:46Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.007817 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.007853 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.007863 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.007876 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.007885 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.012386 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.025747 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.042453 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.055899 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.110637 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.110673 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.110684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.110704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.110714 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.111824 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.128671 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.142547 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.155511 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.167727 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.180896 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.193583 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-l
ib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.203371 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\
\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.213583 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.213643 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.213656 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.213672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.213682 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.252146 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234
a08505eff73b479b26a6a9b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.283427 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.315500 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.315550 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.315562 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.315578 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.315589 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.324513 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.365583 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.402955 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.417791 4611 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.417865 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.417904 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.417919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.417930 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.444219 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.484030 4611 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.522148 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.524210 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.524246 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.524258 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.524276 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.524288 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.568864 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.604813 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.626078 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.626112 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.626124 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.626139 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.626149 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.644355 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.682353 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.720458 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:47Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.728170 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.728196 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.728204 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.728217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.728226 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.831252 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.831301 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.831316 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.831332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.831342 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.924840 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.932736 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.932788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.932799 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.932812 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:47 crc kubenswrapper[4611]: I0929 12:40:47.932820 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:47Z","lastTransitionTime":"2025-09-29T12:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.035432 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.035476 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.035486 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.035503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.035513 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.138049 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.138126 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.138141 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.138156 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.138166 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.240684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.240736 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.240747 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.240760 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.240768 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.342969 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.343009 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.343022 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.343038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.343051 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.445178 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.445206 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.445216 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.445228 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.445237 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.547882 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.547912 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.547920 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.547931 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.547940 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.562220 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.562337 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.562390 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.562513 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.562569 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:04.562552297 +0000 UTC m=+51.454071903 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.563195 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.563260 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:04.563226168 +0000 UTC m=+51.454745774 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.563296 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:41:04.56328857 +0000 UTC m=+51.454808176 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.650330 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.650356 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.650364 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.650375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.650385 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.662847 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663032 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663058 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663071 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663126 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:04.663110572 +0000 UTC m=+51.554630178 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.663288 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663421 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663441 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663450 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.663479 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:04.663471053 +0000 UTC m=+51.554990659 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.735855 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.735896 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.735984 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.736026 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.736129 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:40:48 crc kubenswrapper[4611]: E0929 12:40:48.736200 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.752135 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.752170 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.752180 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.752193 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.752202 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.854038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.854075 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.854088 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.854104 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.854115 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.927272 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.956672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.956711 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.956721 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.956761 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:48 crc kubenswrapper[4611]: I0929 12:40:48.956771 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:48Z","lastTransitionTime":"2025-09-29T12:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.058808 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.058838 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.058846 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.058859 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.058867 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.161275 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.161314 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.161325 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.161341 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.161352 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.263148 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.263198 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.263206 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.263218 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.263226 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.365354 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.365386 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.365395 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.365462 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.365477 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.467181 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.467229 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.467241 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.467258 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.467270 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.526588 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.526617 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.526647 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.526660 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.526668 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: E0929 12:40:49.540809 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.545016 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.545084 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.545098 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.545114 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.545126 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: E0929 12:40:49.557673 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.561421 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.561452 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.561460 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.561473 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.561482 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: E0929 12:40:49.573393 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.577500 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.577539 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.577548 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.577561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.577569 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: E0929 12:40:49.589760 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.593040 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.593090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.593108 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.593120 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.593146 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: E0929 12:40:49.605544 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: E0929 12:40:49.605762 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.607249 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.607291 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.607308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.607324 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.607334 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.709580 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.709654 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.709665 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.709680 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.709690 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.812182 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.812214 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.812222 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.812236 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.812245 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.914374 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.914421 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.914438 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.914455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.914466 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:49Z","lastTransitionTime":"2025-09-29T12:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.931767 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/0.log" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.934493 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7" exitCode=1 Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.934547 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7"} Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.935294 4611 scope.go:117] "RemoveContainer" containerID="60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.951155 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.964167 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.977573 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:49 crc kubenswrapper[4611]: I0929 12:40:49.996204 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:49Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.007991 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.016672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.016712 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.016721 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.016736 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.016748 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.027864 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234
a08505eff73b479b26a6a9b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 
12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.040290 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.054383 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.066347 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.077077 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.098587 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\
"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.113595 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.121570 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.121613 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.121642 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.121659 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.121669 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.125322 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.135720 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.143987 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.223936 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.223970 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.223980 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.223993 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.224003 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.326586 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.326618 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.326643 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.326658 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.326669 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.428366 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.428406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.428417 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.428432 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.428445 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.530305 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.530333 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.530343 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.530356 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.530364 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.632831 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.632873 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.632885 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.632900 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.632914 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735242 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735274 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735370 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:50 crc kubenswrapper[4611]: E0929 12:40:50.735589 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:50 crc kubenswrapper[4611]: E0929 12:40:50.735656 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:50 crc kubenswrapper[4611]: E0929 12:40:50.735724 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735749 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735769 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735777 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735793 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.735803 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.827056 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg"] Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.827799 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.829771 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.829785 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.837491 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.837520 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.837530 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.837543 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.837553 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.839988 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.851642 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.864891 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.877833 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.885838 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/483b90ff-34ed-4569-92d9-14770b68a086-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.885868 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/483b90ff-34ed-4569-92d9-14770b68a086-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.885887 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwvsb\" (UniqueName: \"kubernetes.io/projected/483b90ff-34ed-4569-92d9-14770b68a086-kube-api-access-hwvsb\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.885936 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/483b90ff-34ed-4569-92d9-14770b68a086-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.887479 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.905059 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 
12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.920868 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.931037 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.938467 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/0.log" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.938797 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.938826 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.938837 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.938851 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 
12:40:50.938862 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:50Z","lastTransitionTime":"2025-09-29T12:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.940197 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476"} Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.940295 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.943147 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.953331 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.970552 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.987043 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/483b90ff-34ed-4569-92d9-14770b68a086-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.987085 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/483b90ff-34ed-4569-92d9-14770b68a086-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: 
\"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.987111 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwvsb\" (UniqueName: \"kubernetes.io/projected/483b90ff-34ed-4569-92d9-14770b68a086-kube-api-access-hwvsb\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.987134 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/483b90ff-34ed-4569-92d9-14770b68a086-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.987858 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"started
At\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:50Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.988792 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/483b90ff-34ed-4569-92d9-14770b68a086-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc kubenswrapper[4611]: I0929 12:40:50.989230 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/483b90ff-34ed-4569-92d9-14770b68a086-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:50 crc 
kubenswrapper[4611]: I0929 12:40:50.994165 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/483b90ff-34ed-4569-92d9-14770b68a086-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.002205 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.006715 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwvsb\" (UniqueName: \"kubernetes.io/projected/483b90ff-34ed-4569-92d9-14770b68a086-kube-api-access-hwvsb\") pod \"ovnkube-control-plane-749d76644c-pk8wg\" (UID: \"483b90ff-34ed-4569-92d9-14770b68a086\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.015797 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.026802 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.038221 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.042079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.042240 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.042253 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.042289 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.042303 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.054265 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.064079 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.081294 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 
12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.093419 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.106171 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.119272 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.129903 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.139341 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.141197 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 
29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.143923 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.143951 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.143981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.143997 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.144008 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.152791 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.167045 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.185482 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.198983 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.210702 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.222123 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.241274 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.246942 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.246980 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.246993 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.247010 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.247023 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.252017 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.349224 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.349279 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.349292 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.349317 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.349328 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.451884 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.451927 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.451937 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.451952 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.451962 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.561366 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.561410 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.561420 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.561435 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.561445 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.663375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.663427 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.663442 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.663458 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.663470 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.765663 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.765696 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.765703 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.765717 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.765725 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.868050 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.868094 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.868102 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.868116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.868126 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.945606 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" event={"ID":"483b90ff-34ed-4569-92d9-14770b68a086","Type":"ContainerStarted","Data":"84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.945680 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" event={"ID":"483b90ff-34ed-4569-92d9-14770b68a086","Type":"ContainerStarted","Data":"fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.945696 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" event={"ID":"483b90ff-34ed-4569-92d9-14770b68a086","Type":"ContainerStarted","Data":"801d637bde4c62dcc3d9c97f704b551e4349ad6376ccd62e6a0927c430b63b19"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.947668 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/1.log" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.948134 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/0.log" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.951462 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476" exitCode=1 Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.951520 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.951567 4611 scope.go:117] "RemoveContainer" containerID="60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.952813 4611 scope.go:117] 
"RemoveContainer" containerID="157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476" Sep 29 12:40:51 crc kubenswrapper[4611]: E0929 12:40:51.953138 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.962119 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.969670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.969709 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 
29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.969720 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.969736 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.969747 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:51Z","lastTransitionTime":"2025-09-29T12:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:51 crc kubenswrapper[4611]: I0929 12:40:51.986249 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9
c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 
12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.004678 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.016954 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.030716 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.044388 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.059925 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.072366 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.072412 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.072423 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.072437 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.072448 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.075399 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.089100 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.105644 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.125188 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.139264 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.154789 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.173519 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.175064 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.175123 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.175137 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.175158 4611 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.175172 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.184064 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.195664 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 
12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.217646 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.233239 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.245868 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.256547 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.267299 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.277472 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.277535 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.277566 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.277585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.277597 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.279718 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.292602 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.308031 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.316868 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-xtjl8"] Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.317448 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:52 crc kubenswrapper[4611]: E0929 12:40:52.317532 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.331113 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5575
3cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.347231 4611 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\
\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.360132 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.380345 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.380399 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.380411 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 
12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.380435 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.380449 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.381722 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9
c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\
\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.397204 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.404348 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc4s5\" (UniqueName: \"kubernetes.io/projected/c2df08da-22ae-44b9-b568-06bafc65932c-kube-api-access-zc4s5\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.404395 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.411116 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.424822 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.439954 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.455595 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.473179 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.483369 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.483415 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.483427 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.483448 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.483463 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.488283 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.500485 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.505126 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc4s5\" (UniqueName: \"kubernetes.io/projected/c2df08da-22ae-44b9-b568-06bafc65932c-kube-api-access-zc4s5\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 
12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.505230 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:52 crc kubenswrapper[4611]: E0929 12:40:52.505395 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:52 crc kubenswrapper[4611]: E0929 12:40:52.505515 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:40:53.005476408 +0000 UTC m=+39.896996114 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.520258 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9
c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\
\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.521881 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc4s5\" (UniqueName: \"kubernetes.io/projected/c2df08da-22ae-44b9-b568-06bafc65932c-kube-api-access-zc4s5\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.536864 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.551540 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.563672 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.575657 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.586543 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.586595 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.586610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.586648 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.586662 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.592382 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.616357 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.632018 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba972
3e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.641725 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.659154 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 
12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.673724 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.693178 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.697098 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.697407 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.697576 4611 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.697772 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.697910 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.708227 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:52Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.735509 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.735586 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:52 crc kubenswrapper[4611]: E0929 12:40:52.735645 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.735695 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:52 crc kubenswrapper[4611]: E0929 12:40:52.735826 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:52 crc kubenswrapper[4611]: E0929 12:40:52.735906 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.800595 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.800695 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.800707 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.800756 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.800769 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.903156 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.903198 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.903211 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.903226 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.903237 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:52Z","lastTransitionTime":"2025-09-29T12:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:52 crc kubenswrapper[4611]: I0929 12:40:52.957709 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/1.log" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.005779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.005825 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.005840 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.005860 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.005875 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.010239 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:53 crc kubenswrapper[4611]: E0929 12:40:53.010355 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:53 crc kubenswrapper[4611]: E0929 12:40:53.010401 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. 
No retries permitted until 2025-09-29 12:40:54.010385976 +0000 UTC m=+40.901905582 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.108337 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.108380 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.108391 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.108408 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.108421 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.210465 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.210520 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.210532 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.210549 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.210562 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.312547 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.312582 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.312592 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.312605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.312614 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.415323 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.415363 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.415371 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.415386 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.415396 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.517519 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.517554 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.517566 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.517581 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.517590 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.619762 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.619814 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.619825 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.619841 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.619850 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.723093 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.723135 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.723152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.723200 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.723213 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.735840 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:53 crc kubenswrapper[4611]: E0929 12:40:53.736106 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.750327 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.762359 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.774679 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 
12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.789681 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.803140 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.817876 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.824726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.824763 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.824774 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.824792 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.824803 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.832417 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.845534 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.857981 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.868863 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.891909 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq 
openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41
ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.910021 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.922903 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.927825 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.927869 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.927881 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.927920 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.928014 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:53Z","lastTransitionTime":"2025-09-29T12:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.934043 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.944829 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.958893 4611 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27e
a9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:53 crc kubenswrapper[4611]: I0929 12:40:53.977540 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:40:53Z is after 2025-08-24T17:21:41Z" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.018024 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:54 crc kubenswrapper[4611]: E0929 12:40:54.018182 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:54 crc kubenswrapper[4611]: E0929 12:40:54.018280 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:40:56.018257025 +0000 UTC m=+42.909776761 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.030890 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.030933 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.030945 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.030962 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.030975 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.132942 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.133201 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.133307 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.133416 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.133504 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.237159 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.237210 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.237222 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.237238 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.237250 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.339461 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.339503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.339513 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.339527 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.339535 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.442561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.443145 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.443292 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.443400 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.443479 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.546870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.546903 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.546912 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.546927 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.546936 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.649590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.649665 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.649677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.649694 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.649706 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.735382 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.735388 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:54 crc kubenswrapper[4611]: E0929 12:40:54.735518 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:54 crc kubenswrapper[4611]: E0929 12:40:54.735587 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.735824 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:54 crc kubenswrapper[4611]: E0929 12:40:54.736050 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.752693 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.752761 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.752774 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.752794 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.752805 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.855060 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.855130 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.855148 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.855164 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.855177 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.957297 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.957332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.957340 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.957354 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:54 crc kubenswrapper[4611]: I0929 12:40:54.957363 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:54Z","lastTransitionTime":"2025-09-29T12:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.059673 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.060212 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.060293 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.060381 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.060466 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.162999 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.163026 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.163053 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.163068 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.163076 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.265306 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.265358 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.265370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.265388 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.265401 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.367818 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.367855 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.367868 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.367881 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.367891 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.470610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.470671 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.470684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.470700 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.470711 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.573650 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.573678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.573702 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.573715 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.573725 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.676326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.676397 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.676420 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.676452 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.676476 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.736011 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:55 crc kubenswrapper[4611]: E0929 12:40:55.736229 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.780196 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.780241 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.780258 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.780279 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.780293 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.883217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.883285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.883302 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.883335 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.883355 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.986504 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.986586 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.986608 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.986678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:55 crc kubenswrapper[4611]: I0929 12:40:55.986700 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:55Z","lastTransitionTime":"2025-09-29T12:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.039459 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:56 crc kubenswrapper[4611]: E0929 12:40:56.039807 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:56 crc kubenswrapper[4611]: E0929 12:40:56.039988 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:41:00.039944307 +0000 UTC m=+46.931464053 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.089275 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.090117 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.090213 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.090343 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.090425 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.193890 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.194443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.194655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.194899 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.195104 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.300692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.301340 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.301451 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.301557 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.301669 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.405005 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.405038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.405046 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.405059 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.405070 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.508609 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.508708 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.508724 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.508747 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.508765 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.612193 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.612281 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.612305 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.612335 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.612355 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.715472 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.715520 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.715532 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.715555 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.715568 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.735480 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:56 crc kubenswrapper[4611]: E0929 12:40:56.735663 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.735663 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.735762 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:56 crc kubenswrapper[4611]: E0929 12:40:56.735919 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:56 crc kubenswrapper[4611]: E0929 12:40:56.736082 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.819440 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.819502 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.819515 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.819536 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.819551 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.922685 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.922985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.923078 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.923195 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:56 crc kubenswrapper[4611]: I0929 12:40:56.923298 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:56Z","lastTransitionTime":"2025-09-29T12:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.026605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.026677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.026692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.026710 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.026722 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.129805 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.129872 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.129892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.129921 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.129943 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.233080 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.233469 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.233595 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.233735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.233866 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.336545 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.336828 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.336904 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.336995 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.337082 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.440443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.440756 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.440924 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.441100 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.441253 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.544877 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.545180 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.545254 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.545322 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.545378 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.648922 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.649001 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.649022 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.649054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.649073 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.735557 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:57 crc kubenswrapper[4611]: E0929 12:40:57.736058 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.751816 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.751886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.751900 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.751923 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.751938 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.855785 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.855844 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.855859 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.855882 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.855899 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.959695 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.959747 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.959759 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.959779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:57 crc kubenswrapper[4611]: I0929 12:40:57.959828 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:57Z","lastTransitionTime":"2025-09-29T12:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.061966 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.062053 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.062076 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.062101 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.062119 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.165447 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.166223 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.166356 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.166486 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.166606 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.269567 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.269616 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.269655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.269677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.269697 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.371734 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.371774 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.371786 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.371803 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.371814 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.474270 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.474563 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.474872 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.474973 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.475093 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.578463 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.578513 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.578526 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.578547 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.578561 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.681348 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.681425 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.681440 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.681458 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.681497 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.735991 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:40:58 crc kubenswrapper[4611]: E0929 12:40:58.736120 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.736144 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.736199 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:40:58 crc kubenswrapper[4611]: E0929 12:40:58.736232 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:40:58 crc kubenswrapper[4611]: E0929 12:40:58.736449 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.783693 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.783740 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.783749 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.783765 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.783775 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.886281 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.886318 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.886328 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.886342 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.886350 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.988655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.988728 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.988741 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.988755 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:58 crc kubenswrapper[4611]: I0929 12:40:58.988764 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:58Z","lastTransitionTime":"2025-09-29T12:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.091496 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.091550 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.091572 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.091596 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.091609 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.195094 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.195153 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.195165 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.195187 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.195202 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.298107 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.298149 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.298161 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.298178 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.298188 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.401206 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.401270 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.401286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.401311 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.401328 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.503938 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.504277 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.504350 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.504590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.504699 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.606612 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.606678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.606690 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.606709 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.606721 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.709285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.709580 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.709763 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.709901 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.710036 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.735397 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:40:59 crc kubenswrapper[4611]: E0929 12:40:59.735567 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.812508 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.812552 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.812566 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.812590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.812603 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.914759 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.914946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.915047 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.915150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.915272 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.995227 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.995485 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.995571 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.995659 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:40:59 crc kubenswrapper[4611]: I0929 12:40:59.995729 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:40:59Z","lastTransitionTime":"2025-09-29T12:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.006906 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:00Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.010565 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.010745 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.010827 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.010905 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.010980 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.023920 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:00Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.026946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.026992 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.027001 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.027020 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.027029 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.041288 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:00Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.044265 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.044312 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.044323 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.044340 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.044351 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.055361 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:00Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.058290 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.058337 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.058351 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.058375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.058385 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.068515 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:00Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.068646 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.070025 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.070044 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.070052 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.070064 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.070072 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.082843 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.082992 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.083047 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:41:08.083032059 +0000 UTC m=+54.974551665 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.172003 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.172042 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.172054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.172071 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.172082 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.274215 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.274256 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.274265 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.274282 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.274292 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.376688 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.376726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.376735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.376751 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.376760 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.479049 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.479096 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.479107 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.479123 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.479132 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.580850 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.580886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.580896 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.580910 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.580919 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.682921 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.682966 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.682984 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.683005 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.683027 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.736021 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.736021 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.736253 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.736168 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.736606 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:00 crc kubenswrapper[4611]: E0929 12:41:00.736811 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.786016 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.786056 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.786066 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.786082 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.786091 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.888996 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.889037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.889045 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.889060 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.889069 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.990978 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.991019 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.991033 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.991051 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:00 crc kubenswrapper[4611]: I0929 12:41:00.991066 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:00Z","lastTransitionTime":"2025-09-29T12:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.093457 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.093788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.093915 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.094028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.094114 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.196533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.196807 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.196937 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.197033 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.197162 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.300135 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.300171 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.300180 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.300195 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.300204 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.402686 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.402723 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.402731 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.402745 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.402755 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.504743 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.504788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.504798 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.504813 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.504822 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.606763 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.606798 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.606806 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.606820 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.606828 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.708996 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.709036 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.709044 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.709058 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.709069 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.735580 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:01 crc kubenswrapper[4611]: E0929 12:41:01.735778 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.811655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.811688 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.811696 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.811708 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.811717 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.914594 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.914659 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.914670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.914686 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:01 crc kubenswrapper[4611]: I0929 12:41:01.914697 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:01Z","lastTransitionTime":"2025-09-29T12:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.021655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.021766 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.021788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.021823 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.021848 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.124641 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.124696 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.124723 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.124744 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.124759 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.227329 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.227389 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.227409 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.227434 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.227453 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.330970 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.331090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.331116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.331149 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.331171 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.435354 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.435406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.435419 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.435440 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.435454 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.538138 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.538165 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.538174 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.538186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.538194 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.640917 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.640961 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.640977 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.640997 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.641013 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.735988 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.736012 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.736022 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:02 crc kubenswrapper[4611]: E0929 12:41:02.736123 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:02 crc kubenswrapper[4611]: E0929 12:41:02.736224 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:02 crc kubenswrapper[4611]: E0929 12:41:02.736306 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.742487 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.742519 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.742529 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.742543 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.742552 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.844288 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.844335 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.844349 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.844365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.844391 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.946829 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.946865 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.946875 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.946888 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:02 crc kubenswrapper[4611]: I0929 12:41:02.946898 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:02Z","lastTransitionTime":"2025-09-29T12:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.049727 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.049758 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.049767 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.049807 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.049818 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.152518 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.152674 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.152714 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.152744 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.152766 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.255194 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.255272 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.255285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.255304 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.255317 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.357873 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.357907 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.357916 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.357928 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.357937 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.460116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.460152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.460160 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.460174 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.460183 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.562485 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.562522 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.562534 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.562582 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.562594 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.665361 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.665406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.665417 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.665433 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.665446 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.736208 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:03 crc kubenswrapper[4611]: E0929 12:41:03.736320 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.736612 4611 scope.go:117] "RemoveContainer" containerID="157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.758581 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-
vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.768009 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.768046 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.768056 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.768075 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.768086 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.773696 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.785756 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.797703 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.807680 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.819617 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.834147 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.846486 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.861356 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.870553 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.870585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.870594 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.870605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.870646 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.875408 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.887758 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.906315 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60e74442f7792a2293210d99433bf1a54ee24234a08505eff73b479b26a6a9b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:49Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 12:40:49.084914 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 12:40:49.084929 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 12:40:49.084971 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 12:40:49.084991 5834 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 12:40:49.084998 5834 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 12:40:49.085035 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 12:40:49.085037 5834 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 12:40:49.085059 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 12:40:49.085061 5834 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 12:40:49.085112 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 12:40:49.085121 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 12:40:49.085133 5834 factory.go:656] Stopping watch factory\\\\nI0929 12:40:49.085136 5834 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 12:40:49.085143 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 12:40:49.085144 5834 ovnkube.go:599] Stopped ovnkube\\\\nI0929 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq 
openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41
ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.919820 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.933454 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.948243 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.959986 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.973430 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.973776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.973788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.973803 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.973812 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:03Z","lastTransitionTime":"2025-09-29T12:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.974551 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.985512 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.995032 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/1.log" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.997589 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610"} Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.997716 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:03Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:03 crc kubenswrapper[4611]: I0929 12:41:03.997751 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.009909 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.024250 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.036747 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.056032 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.077877 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.077914 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc 
kubenswrapper[4611]: I0929 12:41:04.077924 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.077938 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.077947 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.082537 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.103712 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.127421 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics
-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.139295 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.150638 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.152460 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.163382 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.172963 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.180119 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.180178 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.180190 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.180209 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.180219 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.183212 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.193297 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.221275 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\
"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.234425 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.245663 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.257898 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.269602 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.279120 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.281798 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.281826 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.281835 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.281848 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.281856 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.291011 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.319704 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.333918 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.341485 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.350292 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.359834 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.370109 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.381090 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.383435 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.383463 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.383472 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.383485 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.383493 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.391955 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.404864 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.415724 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.425585 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.440882 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:04Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.485449 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.485482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.485494 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.485510 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.485520 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.588221 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.588250 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.588285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.588298 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.588307 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.627223 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.627320 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.627358 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.627401 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:41:36.627379168 +0000 UTC m=+83.518898764 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.627434 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.627462 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.627481 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:36.627468571 +0000 UTC m=+83.518988177 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.627506 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:36.627497282 +0000 UTC m=+83.519016888 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.690683 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.690718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.690728 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.690743 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.690754 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.729103 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.729203 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729349 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729380 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729390 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729446 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:36.72943091 +0000 UTC m=+83.620950506 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729451 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729489 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729512 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.729599 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:41:36.729577784 +0000 UTC m=+83.621097420 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.735606 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.735618 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.735727 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.735840 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.735914 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:04 crc kubenswrapper[4611]: E0929 12:41:04.736054 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.793292 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.793349 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.793360 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.793375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.793386 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.895681 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.895731 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.895748 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.895768 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.895783 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.997892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.997952 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.997975 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.997999 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:04 crc kubenswrapper[4611]: I0929 12:41:04.998016 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:04Z","lastTransitionTime":"2025-09-29T12:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.002198 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/2.log" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.003518 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/1.log" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.007800 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610" exitCode=1 Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.007840 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.007878 4611 scope.go:117] "RemoveContainer" containerID="157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.014079 4611 scope.go:117] "RemoveContainer" containerID="4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610" Sep 29 12:41:05 crc kubenswrapper[4611]: E0929 12:41:05.014514 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.033225 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.053951 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba972
3e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.070690 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.082960 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.093087 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.101028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.101308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.101541 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.101747 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.101878 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.106457 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.118837 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.128659 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.140382 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.153017 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.164925 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.174128 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.191010 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\
":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://157411cb68cc4421264c2b92ec394ffc0d2d9ea9c2fe83b32d042232bfc7a476\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"message\\\":\\\"-55646444c4-trplf openshift-ovn-kubernetes/ovnkube-node-p95nv openshift-dns/node-resolver-kf52c openshift-machine-config-operator/machine-config-daemon-d2gnq openshift-etcd/etcd-crc openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI0929 12:40:50.741741 5980 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0929 12:40:50.741758 5980 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741766 5980 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741776 5980 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 12:40:50.741783 5980 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 12:40:50.741788 5980 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 12:40:50.741797 5980 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 12:40:50.741848 5980 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to 
start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node 
ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.204997 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.205033 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.205044 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.205058 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.205067 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.209004 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.221838 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.233969 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.244481 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:05Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.307246 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.307535 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.307643 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.307712 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.307769 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.410695 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.410744 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.410755 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.410772 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.410783 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.512855 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.512899 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.512908 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.512923 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.512936 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.615786 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.615847 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.615856 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.615871 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.615882 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.718278 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.718314 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.718326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.718342 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.718353 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.736263 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:05 crc kubenswrapper[4611]: E0929 12:41:05.736728 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.821158 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.821203 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.821219 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.821239 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.821253 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.923589 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.923656 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.923672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.923693 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:05 crc kubenswrapper[4611]: I0929 12:41:05.923710 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:05Z","lastTransitionTime":"2025-09-29T12:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.013914 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/2.log" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.018204 4611 scope.go:117] "RemoveContainer" containerID="4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610" Sep 29 12:41:06 crc kubenswrapper[4611]: E0929 12:41:06.018412 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.026194 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.026233 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.026244 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.026262 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.026348 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.032713 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.045716 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.058253 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.068683 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.086642 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.098483 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.109443 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.118400 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.128778 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.128826 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.128834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.128846 4611 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.128856 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.129416 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.138595 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.148307 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.159262 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.170957 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.183966 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.196459 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.207119 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.225322 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\
":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has 
stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:06Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.230846 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.230867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.230875 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.230889 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.230898 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.334922 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.335018 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.335044 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.335094 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.335111 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.438834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.438870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.438880 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.438894 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.438904 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.542054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.542117 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.542131 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.542150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.542164 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.645324 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.645360 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.645369 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.645383 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.645392 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.735757 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.735807 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:06 crc kubenswrapper[4611]: E0929 12:41:06.735872 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.735775 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:06 crc kubenswrapper[4611]: E0929 12:41:06.735954 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:06 crc kubenswrapper[4611]: E0929 12:41:06.735998 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.747915 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.747960 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.747981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.748007 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.748028 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.852450 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.852486 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.852494 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.852507 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.852515 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.954105 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.954185 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.954208 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.954230 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:06 crc kubenswrapper[4611]: I0929 12:41:06.954246 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:06Z","lastTransitionTime":"2025-09-29T12:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.056480 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.056521 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.056533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.056549 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.056562 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.159101 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.159144 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.159156 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.159175 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.159193 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.263310 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.263373 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.263384 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.263402 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.263412 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.365482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.365515 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.365526 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.365540 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.365550 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.467980 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.468053 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.468069 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.468087 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.468100 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.570598 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.570655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.570665 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.570680 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.570692 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.673517 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.673582 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.673600 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.673654 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.673677 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.735360 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:07 crc kubenswrapper[4611]: E0929 12:41:07.735512 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.776019 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.776100 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.776127 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.776157 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.776181 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.878785 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.878822 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.878832 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.878846 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.878857 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.981527 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.981590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.981602 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.981643 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:07 crc kubenswrapper[4611]: I0929 12:41:07.981659 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:07Z","lastTransitionTime":"2025-09-29T12:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.084644 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.084689 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.084697 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.084712 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.084721 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.165146 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:08 crc kubenswrapper[4611]: E0929 12:41:08.165399 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:41:08 crc kubenswrapper[4611]: E0929 12:41:08.165549 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:41:24.165520831 +0000 UTC m=+71.057040447 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.188134 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.188184 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.188196 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.188214 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.188225 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.290834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.290926 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.290950 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.290985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.291012 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.394377 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.394443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.394464 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.394491 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.394508 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.498071 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.498113 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.498126 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.498144 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.498154 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.600842 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.600892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.600904 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.600960 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.600976 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.704519 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.704621 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.704675 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.704711 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.704735 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.736303 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.736398 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:08 crc kubenswrapper[4611]: E0929 12:41:08.736470 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.736288 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:08 crc kubenswrapper[4611]: E0929 12:41:08.736743 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:08 crc kubenswrapper[4611]: E0929 12:41:08.736883 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.808733 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.808842 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.808876 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.808914 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.808943 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.912415 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.912472 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.912491 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.912514 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:08 crc kubenswrapper[4611]: I0929 12:41:08.912530 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:08Z","lastTransitionTime":"2025-09-29T12:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.015439 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.015490 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.015506 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.015528 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.015544 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.117657 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.117701 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.117717 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.117735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.117749 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.220218 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.220276 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.220289 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.220311 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.220323 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.323205 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.323261 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.323273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.323292 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.323305 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.427210 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.427278 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.427295 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.427321 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.427342 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.529998 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.530049 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.530066 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.530086 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.530102 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.633491 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.633533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.633542 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.633558 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.633571 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.737084 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:09 crc kubenswrapper[4611]: E0929 12:41:09.737319 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.737892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.737930 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.737954 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.738060 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.738075 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.840217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.840285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.840301 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.840318 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.840330 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.942694 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.942728 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.942735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.942748 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:09 crc kubenswrapper[4611]: I0929 12:41:09.942756 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:09Z","lastTransitionTime":"2025-09-29T12:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.047472 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.047503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.047516 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.047532 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.047543 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.147050 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.147093 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.147109 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.147129 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.147144 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.163002 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.167516 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.167563 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.167668 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.167707 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.167747 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.182949 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.186718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.186767 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.186868 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.186896 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.186911 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.202535 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.206935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.206962 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.206979 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.207016 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.207025 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.223588 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.227327 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.228561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.228712 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.229011 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.229096 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.229204 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.239733 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.243845 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.244036 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.250043 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.250166 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.250204 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.250224 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.250269 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.254419 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.268367 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.279472 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.288679 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.300662 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.311251 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.320347 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.333037 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.345662 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.352979 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.353014 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.353023 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.353037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.353047 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.356833 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.373478 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370a
f288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.385178 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.395831 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.407257 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.419247 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.431907 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.448378 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:10Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.455467 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.455502 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.455511 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.455525 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.455534 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.558273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.558309 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.558319 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.558334 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.558345 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.661008 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.661039 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.661049 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.661064 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.661073 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.735422 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.735476 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.735527 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.735582 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.735677 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:10 crc kubenswrapper[4611]: E0929 12:41:10.735770 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.763073 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.763105 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.763114 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.763129 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.763138 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.866247 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.866508 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.866716 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.866879 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.867005 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.969737 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.969773 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.969784 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.969798 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:10 crc kubenswrapper[4611]: I0929 12:41:10.969808 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:10Z","lastTransitionTime":"2025-09-29T12:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.072378 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.072614 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.072776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.072866 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.072949 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.175693 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.175730 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.175739 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.175752 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.175761 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.278867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.279177 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.279271 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.279349 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.279429 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.382670 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.382919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.383040 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.383134 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.383229 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.485248 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.485284 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.485294 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.485309 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.485320 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.587605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.587660 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.587672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.587687 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.587699 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.690065 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.690105 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.690117 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.690133 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.690143 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.736308 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:11 crc kubenswrapper[4611]: E0929 12:41:11.736684 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.792892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.792933 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.792949 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.792965 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.792979 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.895905 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.896165 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.896326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.896479 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.896673 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.999394 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.999598 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.999726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.999824 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:11 crc kubenswrapper[4611]: I0929 12:41:11.999898 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:11Z","lastTransitionTime":"2025-09-29T12:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.102215 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.102258 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.102269 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.102312 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.102326 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.204568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.204598 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.204606 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.204618 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.204643 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.306872 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.306919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.306929 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.306945 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.306957 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.409525 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.409591 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.409603 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.409659 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.409672 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.511891 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.511957 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.511970 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.511990 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.512001 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.614389 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.614429 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.614440 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.614455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.614467 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.716529 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.716586 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.716596 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.716610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.716636 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.735967 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.736017 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:12 crc kubenswrapper[4611]: E0929 12:41:12.736100 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:12 crc kubenswrapper[4611]: E0929 12:41:12.736230 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.735990 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:12 crc kubenswrapper[4611]: E0929 12:41:12.736288 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.818894 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.818935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.818951 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.818971 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.818986 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.921357 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.921387 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.921395 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.921411 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:12 crc kubenswrapper[4611]: I0929 12:41:12.921420 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:12Z","lastTransitionTime":"2025-09-29T12:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.023765 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.023818 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.023833 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.023851 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.023863 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.126617 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.126692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.126702 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.126718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.126729 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.230040 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.230116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.230139 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.230167 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.230188 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.332994 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.333040 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.333052 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.333070 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.333081 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.435459 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.435511 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.435524 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.435541 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.435554 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.538199 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.538253 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.538264 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.538279 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.538288 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.640921 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.640999 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.641011 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.641028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.641039 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.736970 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:13 crc kubenswrapper[4611]: E0929 12:41:13.737444 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.744491 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.744520 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.744528 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.744542 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.744551 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.752544 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":
\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100
f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.764303 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":
\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.775403 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.797061 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370a
f288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.809447 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.822426 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.835683 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.847165 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.847198 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.847207 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.847220 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.847228 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.847691 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.858284 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.872506 4611 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.886395 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.906240 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\
"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.919964 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.930387 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.939690 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.951467 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.951512 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.951525 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.951551 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.951570 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:13Z","lastTransitionTime":"2025-09-29T12:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.953246 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.967198 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:13 crc kubenswrapper[4611]: I0929 12:41:13.980674 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:13Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.054027 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.054069 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.054080 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.054094 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.054106 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.156877 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.157081 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.157090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.157108 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.157118 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.259525 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.259771 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.259902 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.260021 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.260124 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.363141 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.363177 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.363186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.363200 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.363209 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.465505 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.465568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.465578 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.465593 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.465603 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.567948 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.567985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.567997 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.568014 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.568027 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.669982 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.670026 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.670037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.670053 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.670064 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.735370 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.735464 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:14 crc kubenswrapper[4611]: E0929 12:41:14.735783 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.735473 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:14 crc kubenswrapper[4611]: E0929 12:41:14.735876 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:14 crc kubenswrapper[4611]: E0929 12:41:14.735678 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.772214 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.772756 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.772857 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.772947 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.773167 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.876499 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.876579 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.876595 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.876651 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.876671 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.981067 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.981139 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.981152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.981176 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:14 crc kubenswrapper[4611]: I0929 12:41:14.981190 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:14Z","lastTransitionTime":"2025-09-29T12:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.083945 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.084252 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.084480 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.084723 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.084837 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.187510 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.187885 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.188007 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.188106 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.188253 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.291011 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.291299 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.291501 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.291590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.291690 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.394449 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.394730 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.394854 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.394948 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.395050 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.497917 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.497946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.497955 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.497967 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.497976 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.600771 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.600839 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.600849 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.600864 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.600873 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.703365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.703398 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.703406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.703419 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.703428 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.735967 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:15 crc kubenswrapper[4611]: E0929 12:41:15.736275 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.805918 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.806183 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.806290 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.806403 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.806496 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.910120 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.910174 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.910193 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.910217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:15 crc kubenswrapper[4611]: I0929 12:41:15.910235 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:15Z","lastTransitionTime":"2025-09-29T12:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.012887 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.012933 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.012942 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.012956 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.012965 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.115056 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.115096 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.115107 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.115122 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.115134 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.217694 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.217729 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.217738 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.217756 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.217765 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.320020 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.320079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.320096 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.320122 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.320138 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.423137 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.423330 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.423414 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.423477 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.423535 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.525601 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.525658 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.525669 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.525685 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.525695 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.631526 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.631791 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.631884 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.631970 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.632070 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735087 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735586 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735680 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735752 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735809 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735947 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735827 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:16 crc kubenswrapper[4611]: E0929 12:41:16.736118 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.735869 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:16 crc kubenswrapper[4611]: E0929 12:41:16.736311 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:16 crc kubenswrapper[4611]: E0929 12:41:16.736460 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.837531 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.837564 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.837573 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.837585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.837594 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.940465 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.940500 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.940513 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.940533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:16 crc kubenswrapper[4611]: I0929 12:41:16.940548 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:16Z","lastTransitionTime":"2025-09-29T12:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.042232 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.042273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.042282 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.042293 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.042302 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.144669 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.144712 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.144724 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.144739 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.144752 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.247287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.247326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.247336 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.247351 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.247362 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.350027 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.350563 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.350762 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.350891 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.350987 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.454421 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.454471 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.454485 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.454505 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.454519 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.557818 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.557882 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.557896 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.557918 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.557934 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.661434 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.661730 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.661802 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.661872 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.661933 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.736347 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:17 crc kubenswrapper[4611]: E0929 12:41:17.736476 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.737918 4611 scope.go:117] "RemoveContainer" containerID="4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610" Sep 29 12:41:17 crc kubenswrapper[4611]: E0929 12:41:17.738425 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.764168 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.764439 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.764590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.764794 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.765094 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.868249 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.868287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.868298 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.868314 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.868326 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.971313 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.971383 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.971397 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.971423 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:17 crc kubenswrapper[4611]: I0929 12:41:17.971438 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:17Z","lastTransitionTime":"2025-09-29T12:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
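The scope.go/pod_workers.go pair at 12:41:17.737 shows ovnkube-controller in CrashLoopBackOff with a 20s back-off. Assuming the upstream kubelet defaults (10s initial delay, doubling per restart, capped at five minutes), the observed 20s corresponds to the second restart; a sketch of that schedule:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed kubelet defaults: 10s initial back-off, doubling, 5m cap.
	backoff, max := 10*time.Second, 5*time.Minute
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart %d: back-off %v\n", i, backoff)
		if backoff *= 2; backoff > max {
			backoff = max
		}
	}
}
```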
Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.074232 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.074285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.074297 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.074315 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.074329 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.177253 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.177290 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.177303 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.177320 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.177332 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.280537 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.280660 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.280692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.280723 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.280744 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.382842 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.382886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.382897 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.382913 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.382925 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.485307 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.485351 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.485360 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.485373 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.485386 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.587727 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.587768 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.587780 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.587795 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.587804 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.689999 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.690045 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.690060 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.690080 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.690096 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.736260 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:18 crc kubenswrapper[4611]: E0929 12:41:18.736389 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.736597 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:18 crc kubenswrapper[4611]: E0929 12:41:18.736682 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.736823 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:18 crc kubenswrapper[4611]: E0929 12:41:18.736883 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.792832 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.792877 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.792886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.792898 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.792907 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.895105 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.895143 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.895152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.895166 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.895176 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.997595 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.997659 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.997672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.997689 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:18 crc kubenswrapper[4611]: I0929 12:41:18.997702 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:18Z","lastTransitionTime":"2025-09-29T12:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.100055 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.100092 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.100103 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.100118 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.100129 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.203065 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.203116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.203127 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.203145 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.203416 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.306318 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.306355 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.306363 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.306377 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.306385 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.408104 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.408365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.408434 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.408504 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.408563 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.510540 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.510841 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.510922 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.510988 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.511049 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.612895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.612935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.612946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.612985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.612997 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.715017 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.715265 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.715550 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.715662 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.715757 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.735417 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:19 crc kubenswrapper[4611]: E0929 12:41:19.735583 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.818853 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.818902 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.818917 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.818935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.818947 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.920924 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.921172 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.921257 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.921336 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:19 crc kubenswrapper[4611]: I0929 12:41:19.921418 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:19Z","lastTransitionTime":"2025-09-29T12:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.024159 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.024205 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.024217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.024231 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.024245 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.125933 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.125969 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.125978 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.125993 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.126003 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.228145 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.228455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.228558 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.228680 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.228798 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.322759 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.322988 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.323054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.323127 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.323188 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.335026 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:20Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.338386 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.338418 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.338428 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.338443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.338453 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.351392 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:20Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.355753 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.355796 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.355815 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.355832 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.355841 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.369096 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:20Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.372161 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.372335 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.372487 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.372653 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.372789 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.387161 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:20Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.390542 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.390588 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.390598 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.390612 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.390637 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.401802 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:20Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.401913 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.403280 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.403317 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.403328 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.403344 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.403354 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.505496 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.505534 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.505544 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.505559 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.505567 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.608051 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.608081 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.608090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.608104 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.608113 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.710214 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.710256 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.710264 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.710278 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.710287 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.735593 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.735616 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.735664 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.735759 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.735854 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:20 crc kubenswrapper[4611]: E0929 12:41:20.735959 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.812188 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.812236 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.812248 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.812269 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.812281 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.914730 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.914777 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.914789 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.914809 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:20 crc kubenswrapper[4611]: I0929 12:41:20.914822 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:20Z","lastTransitionTime":"2025-09-29T12:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.016735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.016779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.016791 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.016808 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.016820 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.119781 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.119827 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.119844 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.119860 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.119870 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.223025 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.223284 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.223371 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.223480 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.223561 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.326426 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.326466 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.326478 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.326497 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.326509 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.428714 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.428758 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.428768 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.428783 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.428792 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.531704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.531757 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.531770 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.531789 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.531801 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.634648 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.634692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.634705 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.634719 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.634729 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.735251 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:21 crc kubenswrapper[4611]: E0929 12:41:21.735578 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.736668 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.736698 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.736707 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.736720 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.736731 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:21Z","lastTransitionTime":"2025-09-29T12:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
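[editor's note] Every status patch in this log is rejected for the same reason: the network-node-identity webhook's serving certificate expired on 2025-08-24, so the kubelet's TLS handshake with https://127.0.0.1:9743 fails before any patch reaches the API server. The Go program below is a minimal sketch of the x509 time-window check that produces the "certificate has expired or is not yet valid" message; it is illustrative only (not kubelet or OpenShift source), and the certificate path is a hypothetical placeholder.

// certcheck.go - illustrative sketch of the x509 validity-window check.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; point this at the webhook's serving certificate.
	data, err := os.ReadFile("/tmp/webhook-cert.pem")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	// The TLS handshake compares the current time against the certificate's
	// NotBefore/NotAfter window; outside it, verification fails as in the log.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
		os.Exit(1)
	}
	fmt.Printf("certificate valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
}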
Sep 29 12:41:21 crc kubenswrapper[4611]: I0929 12:41:21.745200 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
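[editor's note] The recurring KubeletNotReady condition comes from the runtime's network-ready check: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, so every pod sync is skipped until the network provider writes one. The sketch below approximates that directory check under stated assumptions (the accepted extensions mirror libcni's usual .conf/.conflist/.json set); it is not the actual kubelet code.

// cnicheck.go - illustrative sketch of the CNI config-directory check
// behind "no CNI configuration file in /etc/kubernetes/cni/net.d/".
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	var confs []string
	for _, e := range entries {
		// Assumed extension set; libcni conventionally scans for these.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		// Mirrors the NetworkReady=false condition reported above.
		fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
		os.Exit(1)
	}
	fmt.Printf("NetworkReady=true: found %v\n", confs)
}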
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.761942 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.761968 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.761977 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.761989 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.761998 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:22Z","lastTransitionTime":"2025-09-29T12:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.863985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.864028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.864038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.864054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.864064 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:22Z","lastTransitionTime":"2025-09-29T12:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.966186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.966227 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.966238 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.966255 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:22 crc kubenswrapper[4611]: I0929 12:41:22.966265 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:22Z","lastTransitionTime":"2025-09-29T12:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.068091 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.068131 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.068140 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.068157 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.068168 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.170617 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.170678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.170688 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.170703 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.170713 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.272884 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.273164 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.273245 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.273337 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.273431 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.376431 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.376471 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.376482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.376499 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.376510 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.478289 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.478532 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.478597 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.478722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.478804 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.580679 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.580721 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.580731 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.580746 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.580757 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.682689 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.682726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.682739 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.682755 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.682767 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.736140 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:23 crc kubenswrapper[4611]: E0929 12:41:23.736353 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.747961 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.758723 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is 
after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.774536 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.785184 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 
12:41:23.785218 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.785227 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.785240 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.785251 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.785653 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.794720 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.809505 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.825506 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.840388 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.858149 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.877452 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.887058 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.887098 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.887107 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.887122 4611 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.887132 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.931946 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d
17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.943300 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.955513 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.966584 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.976438 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,
\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.989297 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.989330 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.989339 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.989352 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.989361 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:23Z","lastTransitionTime":"2025-09-29T12:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:23 crc kubenswrapper[4611]: I0929 12:41:23.989509 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.000805 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:23Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.017704 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\
"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:24Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.030148 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:24Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.091480 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.091512 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.091522 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.091536 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.091547 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.193722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.193754 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.193764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.193779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.193790 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.217685 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:24 crc kubenswrapper[4611]: E0929 12:41:24.217910 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:41:24 crc kubenswrapper[4611]: E0929 12:41:24.218023 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:41:56.21799303 +0000 UTC m=+103.109512676 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.295524 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.295561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.295569 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.295583 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.295594 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.398126 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.398162 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.398170 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.398185 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.398193 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.500074 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.500175 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.500186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.500203 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.500213 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.602499 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.602540 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.602551 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.602568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.602579 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.704936 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.704974 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.704984 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.705005 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.705016 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.735517 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.735534 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.735517 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:24 crc kubenswrapper[4611]: E0929 12:41:24.735615 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:24 crc kubenswrapper[4611]: E0929 12:41:24.735691 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:24 crc kubenswrapper[4611]: E0929 12:41:24.735826 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.807905 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.807951 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.807962 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.807976 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.807989 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.909762 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.909826 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.909840 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.909856 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:24 crc kubenswrapper[4611]: I0929 12:41:24.909868 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:24Z","lastTransitionTime":"2025-09-29T12:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.012149 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.012178 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.012186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.012198 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.012208 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.114727 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.114773 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.114783 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.114797 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.114806 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.217932 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.217972 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.217981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.217997 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.218008 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.320391 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.320453 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.320466 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.320482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.320496 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.422322 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.422359 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.422370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.422384 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.422393 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.524320 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.524358 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.524367 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.524381 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.524393 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.626150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.626187 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.626195 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.626208 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.626217 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.728809 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.728858 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.728871 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.728887 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.728898 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.735369 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:25 crc kubenswrapper[4611]: E0929 12:41:25.735504 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.831002 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.831043 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.831053 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.831066 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.831076 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.932976 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.933030 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.933041 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.933058 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:25 crc kubenswrapper[4611]: I0929 12:41:25.933070 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:25Z","lastTransitionTime":"2025-09-29T12:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.034851 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.034907 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.034919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.034939 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.034954 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.137020 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.137067 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.137079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.137099 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.137113 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.239276 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.239321 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.239330 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.239343 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.239352 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.341506 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.341571 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.341585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.341600 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.341610 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.443538 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.443573 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.443582 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.443600 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.443609 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.545726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.545762 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.545773 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.545792 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.545806 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.648092 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.648126 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.648136 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.648149 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.648158 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.736234 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.736290 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:26 crc kubenswrapper[4611]: E0929 12:41:26.736349 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.736299 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:26 crc kubenswrapper[4611]: E0929 12:41:26.736409 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:26 crc kubenswrapper[4611]: E0929 12:41:26.736460 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.749660 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.749690 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.749701 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.749715 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.749727 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.851847 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.851881 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.851892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.851907 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.851919 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.954524 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.954564 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.954574 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.954588 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:26 crc kubenswrapper[4611]: I0929 12:41:26.954598 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:26Z","lastTransitionTime":"2025-09-29T12:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.057248 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.057283 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.057291 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.057305 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.057313 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.080362 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/0.log" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.080420 4611 generic.go:334] "Generic (PLEG): container finished" podID="18731b4e-6360-4d87-b586-0a9dc6b5af1e" containerID="c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7" exitCode=1 Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.080455 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerDied","Data":"c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.080877 4611 scope.go:117] "RemoveContainer" containerID="c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.100319 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c95
4b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.112395 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.123745 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.131972 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.142949 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.162238 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.162460 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.162483 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.162493 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.162507 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.162517 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.172773 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.186983 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.198079 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.206750 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.225926 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.238256 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.249374 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.263499 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.266841 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.266883 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.266892 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.266906 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.266915 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.277643 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.289370 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.306424 4611 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.318329 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.330921 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod 
was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:27Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.369118 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.369387 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.369473 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.369560 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.369658 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.472097 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.472136 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.472146 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.472161 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.472171 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.574124 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.574379 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.574469 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.574568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.574668 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.676578 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.676612 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.676641 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.676654 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.676664 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.735671 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:27 crc kubenswrapper[4611]: E0929 12:41:27.735800 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.779097 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.779130 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.779138 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.779150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.779159 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.881559 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.881889 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.881954 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.882022 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.882105 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.983976 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.984242 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.984309 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.984371 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:27 crc kubenswrapper[4611]: I0929 12:41:27.984435 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:27Z","lastTransitionTime":"2025-09-29T12:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.084603 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/0.log" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.084669 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerStarted","Data":"8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.085899 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.085922 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.085931 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.085944 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.085955 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.099157 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.112254 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.122961 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.132957 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.146524 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.168410 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.183362 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba972
3e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.189234 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.189277 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.189287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.189303 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.189313 4611 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.193068 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.204102 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.214969 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.227870 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.238109 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.250352 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.262444 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.278618 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.286743 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.291770 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.291805 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.291816 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.291829 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.291839 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.306129 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.315281 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.326091 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:28Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.393744 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.393770 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.393778 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.393790 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.393798 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.495904 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.495932 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.495941 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.495954 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.495962 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.599272 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.599311 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.599325 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.599342 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.599353 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.702086 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.702119 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.702183 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.702199 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.702208 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.735838 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.735846 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.735909 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:28 crc kubenswrapper[4611]: E0929 12:41:28.736261 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:28 crc kubenswrapper[4611]: E0929 12:41:28.736157 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:28 crc kubenswrapper[4611]: E0929 12:41:28.736022 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.804618 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.804674 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.804682 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.804698 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.804708 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.907246 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.907284 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.907294 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.907308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:28 crc kubenswrapper[4611]: I0929 12:41:28.907318 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:28Z","lastTransitionTime":"2025-09-29T12:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.010314 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.010358 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.010367 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.010380 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.010388 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.112072 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.112123 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.112132 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.112146 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.112156 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.214243 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.214286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.214295 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.214309 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.214317 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.316246 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.316287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.316298 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.316315 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.316326 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.418743 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.418769 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.418778 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.418790 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.418799 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.521509 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.521572 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.521894 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.521931 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.521948 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.624089 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.624129 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.624142 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.624160 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.624172 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.726483 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.726543 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.726558 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.726585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.726605 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.735845 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:29 crc kubenswrapper[4611]: E0929 12:41:29.735979 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.828758 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.828818 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.828840 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.828868 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.828890 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.931344 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.931564 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.931669 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.931764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:29 crc kubenswrapper[4611]: I0929 12:41:29.931830 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:29Z","lastTransitionTime":"2025-09-29T12:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.034367 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.034411 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.034423 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.034441 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.034457 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.136831 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.136862 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.136870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.136886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.136894 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.238764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.239030 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.239099 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.239174 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.239239 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.341329 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.341361 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.341370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.341384 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.341392 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.443663 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.443907 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.444010 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.444128 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.444206 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.547200 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.547268 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.547292 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.547322 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.547349 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.606903 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.606959 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.606979 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.607012 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.607036 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.623389 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:30Z is after 
2025-08-24T17:21:41Z" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.628242 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.628283 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.628297 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.628314 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.628325 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.641052 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:30Z is after 
2025-08-24T17:21:41Z" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.646070 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.646375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.646990 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.647417 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.647557 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.668907 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:30Z is after 
2025-08-24T17:21:41Z" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.672749 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.672968 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.673108 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.673286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.673431 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.688466 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:30Z is after 
2025-08-24T17:21:41Z" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.692270 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.692305 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.692316 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.692332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.692343 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.704649 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{…},\\\"runtimeHandlers\\\":[…]}}\" [payload elided: identical, byte for byte, to the image list, nodeInfo, and runtimeHandlers blocks in the preceding patch attempt above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:30Z is after 
2025-08-24T17:21:41Z" Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.705128 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.706394 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.706421 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.706445 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.706458 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.706467 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.736291 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.736340 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.736365 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.736929 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.737040 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:30 crc kubenswrapper[4611]: E0929 12:41:30.737125 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
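The sequence above is the kubelet's node-status retry loop hitting the same wall on every attempt until it gives up ("update node status exceeds retry count"): each PATCH of the node object is intercepted by the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z, more than a month before the clock time in these entries. The rejection is the standard x509 validity-window comparison. A minimal standalone Go sketch of that comparison follows (illustrative only; the certificate path is a placeholder, not the webhook's actual cert location):

    // checkcert.go — illustrative only; mirrors the x509 validity check that
    // fails in the log: a certificate is valid only while
    // NotBefore <= now <= NotAfter.
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        pemBytes, err := os.ReadFile("/path/to/webhook-serving-cert.pem") // placeholder path
        if err != nil {
            fmt.Println("read cert:", err)
            return
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            fmt.Println("no PEM block found")
            return
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            fmt.Println("parse cert:", err)
            return
        }
        now := time.Now().UTC()
        switch {
        case now.After(cert.NotAfter):
            // This is the branch the webhook call hits in the log above.
            fmt.Printf("certificate has expired: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        case now.Before(cert.NotBefore):
            fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
                now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is within its validity window")
        }
    }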
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.737383 4611 scope.go:117] "RemoveContainer" containerID="4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.808554 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.808585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.808594 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.808608 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.808619 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.911860 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.911888 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.911899 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.911983 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:30 crc kubenswrapper[4611]: I0929 12:41:30.911996 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:30Z","lastTransitionTime":"2025-09-29T12:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.013862 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.013887 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.013895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.013907 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.013915 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.094819 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/2.log" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.096897 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.097352 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.114352 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b
3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.115485 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.115998 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.116020 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.116036 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.116069 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
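Every NodeNotReady heartbeat in this log carries the same runtime-network message: NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/ — a file the network provider is expected to drop once it is up, and the ovnkube-node pod is only just starting in the entries above. A rough Go sketch of that readiness condition follows (an approximation for illustration, not kubelet's actual implementation; the extensions are the ones CNI config loaders conventionally accept):

    // cnicheck.go — illustrative only; approximates the "is there any CNI
    // network config on disk yet?" probe behind the NetworkPluginNotReady
    // message in the log.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // directory named in the log message
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Printf("cannot read %s: %v\n", dir, err)
            return
        }
        found := false
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions CNI loaders look for
                fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
                found = true
            }
        }
        if !found {
            fmt.Printf("no CNI configuration file in %s/. Has your network provider started?\n", dir)
        }
    }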
Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.126516 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.138403 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.149848 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.164227 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.175268 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.190974 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.201258 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.218243 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.218288 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.218300 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.218317 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.218329 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.221486 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.237549 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.252095 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.264975 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.274057 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.295266 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node 
ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.307026 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.320457 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.320692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.320760 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.320848 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.320928 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.324088 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.337265 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.349261 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.359081 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:31Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.423082 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.423121 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.423132 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.423148 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.423159 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.525063 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.525298 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.525374 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.525458 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.525543 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.628064 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.628357 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.628447 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.628562 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.628653 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.730515 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.730553 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.730561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.730577 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.730588 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.735874 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:31 crc kubenswrapper[4611]: E0929 12:41:31.736001 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.833066 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.833287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.833381 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.833458 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.833561 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.935558 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.935944 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.936012 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.936082 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:31 crc kubenswrapper[4611]: I0929 12:41:31.936152 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:31Z","lastTransitionTime":"2025-09-29T12:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.038916 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.038987 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.039011 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.039038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.039054 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.102007 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/3.log" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.103087 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/2.log" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.106676 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" exitCode=1 Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.106783 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.107056 4611 scope.go:117] "RemoveContainer" containerID="4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.107479 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:41:32 crc kubenswrapper[4611]: E0929 12:41:32.107721 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.122267 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.137818 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.140893 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.140920 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.140931 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.140945 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.140955 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.151246 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.161897 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.173353 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.189756 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\
"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.202066 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.212004 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.222538 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.231513 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.241984 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.243525 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.243575 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.243593 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.243615 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.243652 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.253221 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.263046 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.278989 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e9e69a70641c3e119a8d227a8b30566190b370af288726f694f6425427d6610\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:04Z\\\",\\\"message\\\":\\\"try setup to complete in iterateRetryResources\\\\nI0929 12:41:04.494988 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.494997 6174 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495001 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-d7d72\\\\nI0929 12:41:04.495005 6174 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-csch6\\\\nI0929 12:41:04.495012 6174 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-csch6 in node crc\\\\nI0929 12:41:04.494971 6174 services_controller.go:445] Built service openshift-kube-apiserver/apiserver LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 12:41:04.495020 6174 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-csch6 after 0 failed attempt(s)\\\\nF0929 12:41:04.495022 6174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:31Z\\\",\\\"message\\\":\\\"ck:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623570 6520 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623661 6520 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-config-operator/metrics\\\\\\\"}\\\\nI0929 12:41:31.623681 6520 services_controller.go:360] Finished syncing service metrics on namespace openshift-config-operator for network=default : 1.212185ms\\\\nI0929 12:41:31.623694 6520 services_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nF0929 12:41:31.623672 6520 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"m
ountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.288960 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.299314 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.309812 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.321330 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.333269 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:32Z is after 2025-08-24T17:21:41Z"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.345594 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.345634 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.345644 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.345656 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.345666 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.447965 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.448003 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.448013 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.448026 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.448036 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.550199 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.550236 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.550245 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.550258 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.550267 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.652870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.652911 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.652919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.652934 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.652942 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.735985 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.736016 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.735985 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:41:32 crc kubenswrapper[4611]: E0929 12:41:32.736092 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:41:32 crc kubenswrapper[4611]: E0929 12:41:32.736216 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:41:32 crc kubenswrapper[4611]: E0929 12:41:32.736284 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.755678 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.755708 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.755718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.755732 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.755742 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.861084 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.861121 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.861131 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.861152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.861167 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.964286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.964590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.964701 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.964789 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:32 crc kubenswrapper[4611]: I0929 12:41:32.964859 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:32Z","lastTransitionTime":"2025-09-29T12:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.067677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.067738 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.067749 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.067766 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.067776 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.111480 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/3.log"
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.114765 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"
Sep 29 12:41:33 crc kubenswrapper[4611]: E0929 12:41:33.114979 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba"
Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.128236 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\
\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.143613 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.157714 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.171778 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.171823 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.171834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.171849 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.171859 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.172161 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.185652 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.205711 4611 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.223311 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.242077 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.257953 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.272837 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.274803 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.274834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.274847 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.274866 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.274924 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.286399 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.313349 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.331390 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20994
82919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:31Z\\\",\\\"message\\\":\\\"ck:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623570 6520 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623661 6520 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-config-operator/metrics\\\\\\\"}\\\\nI0929 12:41:31.623681 6520 services_controller.go:360] Finished syncing service metrics on namespace openshift-config-operator for network=default : 1.212185ms\\\\nI0929 12:41:31.623694 6520 services_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nF0929 12:41:31.623672 6520 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.343257 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.357410 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.370570 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.376788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.376838 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.376851 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.376870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.376883 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.386051 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.397980 4611 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.406471 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.479130 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.479343 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.479457 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.479538 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.479607 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.582333 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.582618 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.582716 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.582821 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.582911 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.685327 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.685364 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.685371 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.685385 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.685393 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.735686 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:33 crc kubenswrapper[4611]: E0929 12:41:33.735863 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.749830 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.762718 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.774407 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.784373 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.787455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.787495 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.787503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.787518 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.787530 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.795383 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.807315 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.816610 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.841300 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:31Z\\\",\\\"message\\\":\\\"ck:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623570 6520 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623661 6520 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-config-operator/metrics\\\\\\\"}\\\\nI0929 12:41:31.623681 6520 services_controller.go:360] Finished syncing service metrics on namespace openshift-config-operator for network=default : 1.212185ms\\\\nI0929 12:41:31.623694 6520 services_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nF0929 12:41:31.623672 6520 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.853594 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.865688 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.877963 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.890431 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.890568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.890680 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.890904 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.891162 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.893920 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.905278 4611 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 
12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.915011 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.927068 4611 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.940486 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.953837 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.971841 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.986710 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:33Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.993432 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.993498 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.993508 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.993522 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:33 crc kubenswrapper[4611]: I0929 12:41:33.993530 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:33Z","lastTransitionTime":"2025-09-29T12:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.095079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.095123 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.095135 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.095150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.095160 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.197422 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.197464 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.197479 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.197496 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.197506 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.299547 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.299599 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.299611 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.299653 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.299666 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.401429 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.401481 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.401498 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.401516 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.401531 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.504027 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.504098 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.504116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.504888 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.504951 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.608314 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.608369 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.608388 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.608410 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.608427 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.710604 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.710645 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.710655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.710667 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.710676 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.736008 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.736060 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.736014 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:34 crc kubenswrapper[4611]: E0929 12:41:34.736176 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:34 crc kubenswrapper[4611]: E0929 12:41:34.736266 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:34 crc kubenswrapper[4611]: E0929 12:41:34.736377 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.813142 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.813195 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.813212 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.813234 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.813249 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.916282 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.916334 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.916353 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.916373 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:34 crc kubenswrapper[4611]: I0929 12:41:34.916386 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:34Z","lastTransitionTime":"2025-09-29T12:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.021456 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.021492 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.021503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.021517 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.021528 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.124187 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.124239 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.124256 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.124280 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.124297 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.227186 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.227227 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.227239 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.227255 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.227266 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.329415 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.329450 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.329462 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.329477 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.329487 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.431142 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.431376 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.431438 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.431553 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.431684 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.534931 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.535173 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.535274 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.535340 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.535396 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.638063 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.638577 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.638823 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.638992 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.639122 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.735762 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:35 crc kubenswrapper[4611]: E0929 12:41:35.735983 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.741338 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.741374 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.741383 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.741396 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.741406 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.844064 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.844119 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.844135 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.844157 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.844178 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.947030 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.947063 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.947075 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.947089 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:35 crc kubenswrapper[4611]: I0929 12:41:35.947130 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:35Z","lastTransitionTime":"2025-09-29T12:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.050052 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.050698 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.050811 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.050902 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.050986 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.153772 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.154120 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.154270 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.154418 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.154567 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.257592 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.257653 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.257663 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.257676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.257685 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.360067 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.360380 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.360467 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.360540 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.360601 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.463431 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.463475 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.463489 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.463508 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.463524 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.565768 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.565820 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.565835 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.565855 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.565869 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.641612 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.641746 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.641785 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.64176481 +0000 UTC m=+147.533284416 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.641840 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.641868 4611 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.641951 4611 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.641964 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.641943175 +0000 UTC m=+147.533462791 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.642002 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.641992716 +0000 UTC m=+147.533512322 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.668357 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.668396 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.668408 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.668424 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.668435 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.735863 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.735940 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.735994 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.735962 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.736122 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.736221 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.743380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.743416 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743516 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743535 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743540 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743550 4611 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743556 4611 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743560 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743608 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.743592442 +0000 UTC m=+147.635112048 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:36 crc kubenswrapper[4611]: E0929 12:41:36.743652 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.743645234 +0000 UTC m=+147.635164840 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.771666 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.772063 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.772167 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.772323 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.772419 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.874784 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.874861 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.874876 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.874895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.874908 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.977703 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.977760 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.977773 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.977796 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:36 crc kubenswrapper[4611]: I0929 12:41:36.977808 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:36Z","lastTransitionTime":"2025-09-29T12:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.080239 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.080295 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.080307 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.080345 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.080358 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.182596 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.182673 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.182694 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.182739 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.182750 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.284707 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.284763 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.284776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.284795 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.284809 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.387375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.387415 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.387426 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.387443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.387453 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.490118 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.490175 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.490192 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.490209 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.490218 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.592239 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.592273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.592282 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.592296 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.592306 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.694820 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.694861 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.694870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.694884 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.694898 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.735556 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:37 crc kubenswrapper[4611]: E0929 12:41:37.735700 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.797022 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.797111 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.797122 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.797134 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.797144 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.899935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.899972 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.899981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.899999 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:37 crc kubenswrapper[4611]: I0929 12:41:37.900016 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:37Z","lastTransitionTime":"2025-09-29T12:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.002076 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.002121 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.002130 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.002332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.002341 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.104509 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.104549 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.104561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.104576 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.104587 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.207200 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.207455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.207553 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.207648 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.207717 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.309677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.309726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.309737 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.309754 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.309764 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.412730 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.412769 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.412777 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.412793 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.412802 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.514956 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.514995 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.515006 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.515023 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.515036 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.622156 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.622412 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.622424 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.622441 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.622455 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.724498 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.724546 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.724556 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.724579 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.724590 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.735911 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.735940 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:38 crc kubenswrapper[4611]: E0929 12:41:38.736066 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.735908 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:38 crc kubenswrapper[4611]: E0929 12:41:38.736262 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:38 crc kubenswrapper[4611]: E0929 12:41:38.736409 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.827498 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.827556 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.827566 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.827583 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.827595 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.930378 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.930411 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.930421 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.930438 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:38 crc kubenswrapper[4611]: I0929 12:41:38.930449 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:38Z","lastTransitionTime":"2025-09-29T12:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.032868 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.032918 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.032930 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.032946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.032957 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.135365 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.135422 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.135431 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.135448 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.135477 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.237986 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.238020 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.238063 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.238082 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.238094 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.340009 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.340050 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.340066 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.340079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.340089 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.442835 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.443079 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.443090 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.443104 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.443112 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.545674 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.545710 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.545722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.545738 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.545748 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.648684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.648728 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.648740 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.648756 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.648764 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.735988 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:39 crc kubenswrapper[4611]: E0929 12:41:39.736148 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.750761 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.750807 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.750816 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.750828 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.750838 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.852425 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.852473 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.852482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.852503 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.852513 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.954470 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.954517 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.954528 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.954544 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:39 crc kubenswrapper[4611]: I0929 12:41:39.954558 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:39Z","lastTransitionTime":"2025-09-29T12:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.057164 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.057199 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.057210 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.057225 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.057234 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.159887 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.159936 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.159951 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.159967 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.159981 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.262151 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.262214 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.262236 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.262265 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.262287 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.364386 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.364434 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.364442 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.364455 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.364466 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.467342 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.467379 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.467398 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.467415 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.467509 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.576637 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.576705 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.576717 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.576731 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.576740 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.679459 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.679496 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.679505 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.679518 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.679529 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.736039 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.736072 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.736119 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.736169 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.736294 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.736417 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.782174 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.782250 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.782263 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.782279 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.782290 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.884516 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.884593 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.884607 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.884662 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.884679 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.926425 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.926486 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.926498 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.926514 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.926537 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.937682 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.940894 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.940918 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.940926 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.940940 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.940949 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.953705 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.956905 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.956952 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.956991 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.957008 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.957019 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.968136 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.971208 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.971251 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.971259 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.971273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.971282 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.982857 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.985990 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.986024 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.986033 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.986046 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.986055 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.995926 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:40Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:40 crc kubenswrapper[4611]: E0929 12:41:40.996061 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.997222 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.997257 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.997268 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.997283 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:40 crc kubenswrapper[4611]: I0929 12:41:40.997295 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:40Z","lastTransitionTime":"2025-09-29T12:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.099734 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.099770 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.099779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.099793 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.099803 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.201981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.202017 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.202025 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.202038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.202047 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.304252 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.304308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.304318 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.304331 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.304340 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.406308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.406341 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.406351 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.406368 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.406378 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.508372 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.508412 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.508424 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.508441 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.508454 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.635702 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.635760 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.635770 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.635785 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.635797 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.737534 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:41 crc kubenswrapper[4611]: E0929 12:41:41.737713 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.738597 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.738695 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.738718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.738743 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.738760 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.840404 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.840443 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.840452 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.840468 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.840476 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.944101 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.944150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.944161 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.944177 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:41 crc kubenswrapper[4611]: I0929 12:41:41.944188 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:41Z","lastTransitionTime":"2025-09-29T12:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.046655 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.046696 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.046707 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.046722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.046733 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.148664 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.148702 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.148711 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.148726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.148735 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.251867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.251916 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.251928 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.251944 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.251981 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.353868 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.353915 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.353927 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.353954 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.353971 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.456011 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.456068 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.456085 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.456107 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.456123 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.558784 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.558824 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.558835 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.558852 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.558865 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.661438 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.661475 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.661487 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.661502 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.661511 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.735678 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:42 crc kubenswrapper[4611]: E0929 12:41:42.735822 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.735952 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.735948 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:42 crc kubenswrapper[4611]: E0929 12:41:42.736161 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:42 crc kubenswrapper[4611]: E0929 12:41:42.736277 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.764061 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.764138 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.764164 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.764195 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.764235 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.866954 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.867030 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.867054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.867086 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.867111 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.970000 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.970054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.970067 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.970083 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:42 crc kubenswrapper[4611]: I0929 12:41:42.970095 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:42Z","lastTransitionTime":"2025-09-29T12:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.073242 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.073285 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.073298 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.073313 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.073324 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.175261 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.175306 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.175316 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.175332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.175343 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.277943 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.277981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.277994 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.278010 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.278021 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.380695 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.380744 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.380763 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.380781 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.380794 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.482658 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.483371 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.483439 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.483470 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.483486 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.585265 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.585304 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.585313 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.585326 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.585335 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.687544 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.687588 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.687599 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.687614 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.687641 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.735526 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:43 crc kubenswrapper[4611]: E0929 12:41:43.735655 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.753380 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89d0d22a-4a48-42e0-9558-c44d94c4dd50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab25fdab7e878f4ebeff18e1512e317a3647215c9429bbd9104869287943748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa66301bb175ad61b93298e64b8d3a748bbc5809e1f901b4eb403b680f06028d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"n
ame\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be8d58be4599c25c727254bc7f5afed1be97347438df5d8d620310d66a18329a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476f4e5d8d485245dc8b4399e4f03216b723c5b3b19aa4163686f516d08c4cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9f1c5d55fe0ed07aa3b1db98969c308552f4c63e6adc17aa28f878dfbd9314\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f5d6ed7b9f0fe4d7ab4fd303a876d2163e8e7c954b9127b7b7f54a26999316\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-
o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9142292992c68e59d06a25b4a9f9a19e6077c1ec2d5d27088993f1bbf8d286f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e3f60cbdc9d485804eb743b08d839633846fcf26889e41e360095d9e02237\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.765516 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76042dd6-2a76-4aff-8ac0-ba36711c22ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7f0c645def2426e76ca66efce0661d63bcff86d1bde8c3d5016e03ac161014a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48c528338a3d5c15c0b55f46ce3ba9723e2a9663f8415757b06fa1d037f60fcf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995b3d6630d6a7cae0c7f92adc4cf1a4ce5354846c2daecc59ee54e691c808a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d8476ef986b42e37f8f436b782bdf9f3f7d8d27ea9eecc9b258a9ebb14f82\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://667fed57377b2e4d30c1b526577afc8627bb0c2e196ff9706e5647f12c679cb9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T12:40:27Z\\\",\\\"message\\\":\\\"W0929 12:40:16.988489 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 12:40:16.989591 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759149616 cert, and key in /tmp/serving-cert-3069361/serving-signer.crt, /tmp/serving-cert-3069361/serving-signer.key\\\\nI0929 12:40:17.172999 1 observer_polling.go:159] Starting file observer\\\\nW0929 12:40:17.176883 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 12:40:17.176993 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 12:40:17.177522 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3069361/tls.crt::/tmp/serving-cert-3069361/tls.key\\\\\\\"\\\\nF0929 12:40:27.588883 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf6e9b7a63085064651e00a6415bdb90430175b757d5ed21a8c81ab150ba8c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://cc7a4c8c4d4386078e5b8407858ac0741ab7c109ec5fca0eecc7e16e8057cd28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.777216 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-648vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49de17a8-ece1-4707-9f9a-5c192e484b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://399cd7641f99c86f52dcd860635033f5c6387543394a2eef6f7c78e3bb17b018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24cnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-648vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.788001 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"483b90ff-34ed-4569-92d9-14770b68a086\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd017e7489febd918589aa50658907f67480f15d0b61f0e484256bcec57fdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fb7b20eb9b49845586363a15bd1f47cbc7ed706bfbf988753bf4c7a9ac9b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwvsb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pk8wg\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.789370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.789417 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.789431 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.789448 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.789460 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.797160 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2df08da-22ae-44b9-b568-06bafc65932c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zc4s5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xtjl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.807920 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ded305c736e9156589b779ebfb63fef40f6ffed39b8ffb4d0d3ee7108c20ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f57633c787eefd3a6526e689723b37aef15428687eeba7278f57b743af8d332b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.819724 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c80d171b21d5a4719466475a068f9f1aa09d7594d428698443369d6c0a3b24dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.832314 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.847001 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-d7d72" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d9143-5ef6-484f-8ff9-31d671abb269\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceb9b46c1527cb023d2ab7e46829b5b795bf1b7e22dddeb1666fbab8b3c04fb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44e3bd07c33c56f2cfb315863b13774913a85e7c23960b20d632f183fc425a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55753cbfd95a000db3befed35721d85484a69cf32aaa955afa27ceb4482755ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59a8cfd4eb89719a48a3c68e42158034254b5b74b452d2f6ba136fa2dfb7fc3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b26debf409318e0b5afd50c09842286bd108d294ceada303fe2147670b94a490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6755b6418774739ceab769d250fa0d1960296fd75ae0e99a5c4757a2807bb5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d8a457ab72422f54824a258afef6100f9db2241671969f040a5544176b7d885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dzdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-d7d72\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.858987 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-csch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"18731b4e-6360-4d87-b586-0a9dc6b5af1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:26Z\\\",\\\"message\\\":\\\"2025-09-29T12:40:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42\\\\n2025-09-29T12:40:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c684dafb-e389-4f2b-8865-741ee97e9a42 to /host/opt/cni/bin/\\\\n2025-09-29T12:40:41Z [verbose] multus-daemon started\\\\n2025-09-29T12:40:41Z [verbose] Readiness Indicator file check\\\\n2025-09-29T12:41:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:41:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5dxj8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-multus\"/\"multus-csch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.870196 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kf52c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ffe5254-1d23-44e3-8d04-5da256598928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb809d5b4ec1b10741ffba385fc0e64852d35930e6b44a29e72e73fe21aee039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cbtwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kf52c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.886442 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T12:41:31Z\\\",\\\"message\\\":\\\"ck:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623570 6520 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 12:41:31.623661 6520 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-config-operator/metrics\\\\\\\"}\\\\nI0929 12:41:31.623681 6520 services_controller.go:360] Finished syncing service metrics on namespace openshift-config-operator for network=default : 1.212185ms\\\\nI0929 12:41:31.623694 6520 services_controller.go:356] Processing sync for service openshift-marketplace/certified-operators for network=default\\\\nF0929 12:41:31.623672 6520 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T12:41:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2pdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p95nv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.891172 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.891211 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.891222 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.891238 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.891250 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.897463 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5676946a-e9b7-4f28-959f-0e1d30bd7066\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6aff07593b66b674b53045ca6a1ab3d707f0fa4f6925ebab7c1ed93920644260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31c7191a5c2887f76d630e0d773e8cbb8cec71ba2082bcb1fbf22d660db00254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4787253ce0fe27774ff1bdb43c048a1f2432fb39f831d0158e4b95e48a65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1397764c814fff51e208af417444dcdf062cd262904425a861300042fe12a301\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.909082 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3641d470-1b94-4db8-baf3-5a35a60d6f53\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eb57d4e71f0308c903e5e5f4a607d33fe5afb49070191f2772c8377b2130c9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02a7021e420308e20c966617e47933889cbc2df56bd6457e6479804014160\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2fed72625a10e1e3ab6c0038f89390fd452f2d0b16ebd26aff650bf753563b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6479782abcff303d6977511d03dbb908732cba6cd3de4964681c3fe3b2b553a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.920975 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.931267 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.942072 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fea0777-8bbe-4100-806a-2580c80c902c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43434856b6952feb7c9bfa5fe98b760f239effebc8b32f520f4385bdff6c08a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svqc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d2gnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.951022 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a51361e-c4a0-4737-8118-7ff9021e6aa5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b1865130fbd84520d03f7595a96953e51e6ee1604ac092a613df287f8c4001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f98fdcfa412111cb240131672f1493a3e30ba42764c3c3d2379663319ecf2be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T12:40:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T12:40:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T12:40:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.962092 4611 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T12:40:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de63c3eedfe15f3cc7d79fac1e9adaa6905986bf66a9af1b0a9bf2f810f33f94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T12:40:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:43Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.994676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.994704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.994714 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.994742 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:43 crc kubenswrapper[4611]: I0929 12:41:43.994752 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:43Z","lastTransitionTime":"2025-09-29T12:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.097534 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.097577 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.097590 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.097611 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.097654 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.199790 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.200272 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.200343 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.200482 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.200541 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.302867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.302906 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.302917 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.302934 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.302948 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.406217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.406746 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.406933 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.407089 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.407255 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.509840 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.510367 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.510444 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.510514 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.510574 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.612208 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.612317 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.612335 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.612356 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.612371 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.715141 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.715169 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.715180 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.715195 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.715205 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.736032 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.736108 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.736038 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:44 crc kubenswrapper[4611]: E0929 12:41:44.736189 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:44 crc kubenswrapper[4611]: E0929 12:41:44.736360 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:44 crc kubenswrapper[4611]: E0929 12:41:44.736403 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.818382 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.818453 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.818470 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.818492 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.818507 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.920554 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.920598 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.920610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.920660 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:44 crc kubenswrapper[4611]: I0929 12:41:44.920673 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:44Z","lastTransitionTime":"2025-09-29T12:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.022939 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.022965 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.022973 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.022985 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.022995 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.124586 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.124646 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.124686 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.124702 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.124711 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.226800 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.227185 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.227349 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.227492 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.227683 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.330407 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.330452 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.330464 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.330480 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.330492 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.432292 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.432536 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.432717 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.432861 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.432927 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.534816 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.534846 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.534854 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.534867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.534875 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.636617 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.636941 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.637032 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.637116 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.637234 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.735724 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:45 crc kubenswrapper[4611]: E0929 12:41:45.736050 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.736616 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:41:45 crc kubenswrapper[4611]: E0929 12:41:45.736761 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.738870 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.738898 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.738907 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.738919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.738929 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.842397 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.842441 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.842450 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.842466 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.842475 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.944393 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.944427 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.944464 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.944478 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:45 crc kubenswrapper[4611]: I0929 12:41:45.944486 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:45Z","lastTransitionTime":"2025-09-29T12:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 12:41:46 crc kubenswrapper[4611]: I0929 12:41:46.736216 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:41:46 crc kubenswrapper[4611]: I0929 12:41:46.736216 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:41:46 crc kubenswrapper[4611]: I0929 12:41:46.736235 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:41:46 crc kubenswrapper[4611]: E0929 12:41:46.736348 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:41:46 crc kubenswrapper[4611]: E0929 12:41:46.736569 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:41:46 crc kubenswrapper[4611]: E0929 12:41:46.736677 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:41:47 crc kubenswrapper[4611]: I0929 12:41:47.735744 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8"
Sep 29 12:41:47 crc kubenswrapper[4611]: E0929 12:41:47.735869 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c"
Sep 29 12:41:48 crc kubenswrapper[4611]: I0929 12:41:48.736160 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:41:48 crc kubenswrapper[4611]: I0929 12:41:48.736442 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:41:48 crc kubenswrapper[4611]: I0929 12:41:48.736445 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:41:48 crc kubenswrapper[4611]: E0929 12:41:48.736515 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:41:48 crc kubenswrapper[4611]: E0929 12:41:48.736580 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:41:48 crc kubenswrapper[4611]: E0929 12:41:48.736695 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:41:49 crc kubenswrapper[4611]: I0929 12:41:49.736123 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8"
Sep 29 12:41:49 crc kubenswrapper[4611]: E0929 12:41:49.736260 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c"
Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.735845 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:41:50 crc kubenswrapper[4611]: E0929 12:41:50.736145 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.735981 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:41:50 crc kubenswrapper[4611]: E0929 12:41:50.736316 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.735924 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:41:50 crc kubenswrapper[4611]: E0929 12:41:50.736474 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.880391 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.880447 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.880459 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.880486 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.880502 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:50Z","lastTransitionTime":"2025-09-29T12:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.982949 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.982981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.982989 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.983003 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:50 crc kubenswrapper[4611]: I0929 12:41:50.983012 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:50Z","lastTransitionTime":"2025-09-29T12:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.084970 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.085012 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.085023 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.085041 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.085053 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.187328 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.187363 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.187375 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.187391 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.187401 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.289101 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.289143 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.289152 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.289165 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.289173 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.391033 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.391057 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.391065 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.391078 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.391086 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.391990 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.392017 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.392028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.392039 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.392047 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.403029 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.406423 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.406464 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.406472 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.406487 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.406495 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.418009 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.421475 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.421500 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.421532 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.421546 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.421555 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.433531 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.437011 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.437042 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.437239 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.437261 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.437272 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.447939 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.450662 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.450694 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.450704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.450719 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.450728 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.461121 4611 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T12:41:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e5548895-0aa9-44f2-872c-0702b2be968c\\\",\\\"systemUUID\\\":\\\"b577e751-f004-45f9-a489-c870d2ba486c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T12:41:51Z is after 2025-08-24T17:21:41Z" Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.461315 4611 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.493667 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.493726 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.493740 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.493759 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.493771 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.596408 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.596439 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.596448 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.596463 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.596487 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.698334 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.698370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.698378 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.698394 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.698402 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.736726 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:51 crc kubenswrapper[4611]: E0929 12:41:51.736811 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.800677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.800722 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.800735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.800752 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.800764 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.903179 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.903226 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.903237 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.903254 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:51 crc kubenswrapper[4611]: I0929 12:41:51.903266 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:51Z","lastTransitionTime":"2025-09-29T12:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.005867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.005921 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.005930 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.005947 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.005959 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.107895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.107942 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.107959 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.107976 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.107998 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.210170 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.210200 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.210207 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.210220 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.210230 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.312477 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.312519 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.312533 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.312549 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.312561 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.415097 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.415134 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.415145 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.415158 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.415167 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.517917 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.517946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.517954 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.517966 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.517976 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.620838 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.620878 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.620888 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.620903 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.620914 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.722965 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.723003 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.723014 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.723028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.723039 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.735407 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:41:52 crc kubenswrapper[4611]: E0929 12:41:52.735538 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.735423 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:41:52 crc kubenswrapper[4611]: E0929 12:41:52.735609 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.735408 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:41:52 crc kubenswrapper[4611]: E0929 12:41:52.735684 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.824829 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.824867 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.824878 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.824899 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.824910 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.926994 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.927034 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.927045 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.927063 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:52 crc kubenswrapper[4611]: I0929 12:41:52.927074 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:52Z","lastTransitionTime":"2025-09-29T12:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.029332 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.029388 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.029398 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.029442 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.029453 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.136646 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.136684 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.136692 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.136704 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.136713 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.238668 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.238708 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.238718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.238733 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.238744 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.341000 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.341052 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.341069 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.341093 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.341109 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.443187 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.443222 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.443231 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.443248 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.443257 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.546324 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.546379 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.546399 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.546423 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.546439 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.648885 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.648924 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.648935 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.648951 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.648962 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.736976 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8"
Sep 29 12:41:53 crc kubenswrapper[4611]: E0929 12:41:53.737070 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.752772 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.752809 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.752820 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.752832 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.752841 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.778090 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-648vg" podStartSLOduration=76.778070931 podStartE2EDuration="1m16.778070931s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.777488454 +0000 UTC m=+100.669008060" watchObservedRunningTime="2025-09-29 12:41:53.778070931 +0000 UTC m=+100.669590537"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.804846 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pk8wg" podStartSLOduration=75.80482538 podStartE2EDuration="1m15.80482538s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.788621238 +0000 UTC m=+100.680140844" watchObservedRunningTime="2025-09-29 12:41:53.80482538 +0000 UTC m=+100.696344986"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.834567 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=43.834549125 podStartE2EDuration="43.834549125s" podCreationTimestamp="2025-09-29 12:41:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.817294653 +0000 UTC m=+100.708814279" watchObservedRunningTime="2025-09-29 12:41:53.834549125 +0000 UTC m=+100.726068731"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.850684 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=81.850669844 podStartE2EDuration="1m21.850669844s" podCreationTimestamp="2025-09-29 12:40:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.83507741 +0000 UTC m=+100.726597016" watchObservedRunningTime="2025-09-29 12:41:53.850669844 +0000 UTC m=+100.742189450"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.855605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.855666 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.855677 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.855695 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.855707 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.879148 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-d7d72" podStartSLOduration=75.879129582 podStartE2EDuration="1m15.879129582s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.878481473 +0000 UTC m=+100.770001079" watchObservedRunningTime="2025-09-29 12:41:53.879129582 +0000 UTC m=+100.770649188"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.902221 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-csch6" podStartSLOduration=75.902206673 podStartE2EDuration="1m15.902206673s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.902069109 +0000 UTC m=+100.793588735" watchObservedRunningTime="2025-09-29 12:41:53.902206673 +0000 UTC m=+100.793726279"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.957878 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.957911 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.957919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.957932 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.957941 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:53Z","lastTransitionTime":"2025-09-29T12:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.959426 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-kf52c" podStartSLOduration=76.959412028 podStartE2EDuration="1m16.959412028s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.925647975 +0000 UTC m=+100.817167591" watchObservedRunningTime="2025-09-29 12:41:53.959412028 +0000 UTC m=+100.850931634"
Sep 29 12:41:53 crc kubenswrapper[4611]: I0929 12:41:53.972763 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.972751206 podStartE2EDuration="32.972751206s" podCreationTimestamp="2025-09-29 12:41:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:53.972529459 +0000 UTC m=+100.864049065" watchObservedRunningTime="2025-09-29 12:41:53.972751206 +0000 UTC m=+100.864270812"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.027447 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podStartSLOduration=77.027430267 podStartE2EDuration="1m17.027430267s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:54.027164089 +0000 UTC m=+100.918683695" watchObservedRunningTime="2025-09-29 12:41:54.027430267 +0000 UTC m=+100.918949873"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.059981 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.060029 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.060038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.060050 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.060077 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.068902 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=81.068884493 podStartE2EDuration="1m21.068884493s" podCreationTimestamp="2025-09-29 12:40:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:54.052706352 +0000 UTC m=+100.944225978" watchObservedRunningTime="2025-09-29 12:41:54.068884493 +0000 UTC m=+100.960404099" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.162842 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.162895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.162945 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.162964 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.162974 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.264875 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.264912 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.264921 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.264934 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.264951 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.367521 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.367561 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.367572 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.367585 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.367596 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.469946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.469990 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.470000 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.470015 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.470022 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.572528 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.573054 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.573217 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.573377 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.573531 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.676540 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.676567 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.676574 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.676587 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.676596 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.735761 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.735889 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.735950 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 12:41:54 crc kubenswrapper[4611]: E0929 12:41:54.736089 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 12:41:54 crc kubenswrapper[4611]: E0929 12:41:54.736218 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 12:41:54 crc kubenswrapper[4611]: E0929 12:41:54.736386 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.778761 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.778788 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.778797 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.778828 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.778837 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.881786 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.882043 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.882130 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.882220 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.882297 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.985406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.985442 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.985451 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.985464 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:54 crc kubenswrapper[4611]: I0929 12:41:54.985472 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:54Z","lastTransitionTime":"2025-09-29T12:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.087842 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.087890 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.087900 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.087914 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.087924 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.190277 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.190312 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.190321 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.190335 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.190344 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.292415 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.292481 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.292497 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.292522 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.292542 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.394866 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.394902 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.394912 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.394929 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.394939 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.497699 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.497799 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.497812 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.497838 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.497853 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.600287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.600329 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.600338 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.600351 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.600361 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.702591 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.702673 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.702685 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.702701 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.702711 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.735300 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8"
Sep 29 12:41:55 crc kubenswrapper[4611]: E0929 12:41:55.735427 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.804854 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.804887 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.804896 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.804909 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.804919 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.907180 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.907218 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.907225 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.907240 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:55 crc kubenswrapper[4611]: I0929 12:41:55.907252 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:55Z","lastTransitionTime":"2025-09-29T12:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.010347 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.010404 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.010418 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.010436 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.010449 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.112904 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.112945 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.112957 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.112973 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.112984 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.215610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.215672 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.215683 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.215698 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.215708 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.235319 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8"
Sep 29 12:41:56 crc kubenswrapper[4611]: E0929 12:41:56.235472 4611 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 12:41:56 crc kubenswrapper[4611]: E0929 12:41:56.235557 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs podName:c2df08da-22ae-44b9-b568-06bafc65932c nodeName:}" failed. No retries permitted until 2025-09-29 12:43:00.235531133 +0000 UTC m=+167.127050769 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs") pod "network-metrics-daemon-xtjl8" (UID: "c2df08da-22ae-44b9-b568-06bafc65932c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.318759 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.318812 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.318827 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.318844 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.318854 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
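[editor's note] The MountVolume.SetUp failure above is not retried immediately: nestedpendingoperations schedules the next attempt for 12:43:00 with durationBeforeRetry 1m4s. 64s is consistent with a delay that doubles on each consecutive failure from a sub-second initial value up to a cap. A sketch of that pattern; the initial delay, factor, and cap below are assumptions for illustration, not kubelet's exact constants:

    package main

    import (
        "fmt"
        "time"
    )

    // nextBackoff doubles the retry delay on every consecutive failure,
    // capped at maxDelay. With initial=500ms, the 8th consecutive failure
    // yields 64s -- the 1m4s durationBeforeRetry seen in the log above.
    func nextBackoff(failures int, initial, maxDelay time.Duration) time.Duration {
        d := initial
        for i := 1; i < failures; i++ {
            d *= 2
            if d >= maxDelay {
                return maxDelay
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 9; n++ {
            fmt.Printf("failure %d -> wait %v\n", n, nextBackoff(n, 500*time.Millisecond, 2*time.Minute))
        }
    }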
Has your network provider started?"} Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.422368 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.422438 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.422458 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.422485 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.422509 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.524878 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.524943 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.524958 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.524978 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.524990 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.628131 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.628248 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.628273 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.628301 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.628318 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.731209 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.731244 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.731255 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.731275 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.731289 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.735851 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.735964 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.735984 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:56 crc kubenswrapper[4611]: E0929 12:41:56.736063 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:56 crc kubenswrapper[4611]: E0929 12:41:56.736367 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:56 crc kubenswrapper[4611]: E0929 12:41:56.736455 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.834776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.834843 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.834861 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.834887 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.834907 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.938605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.938663 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.938674 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.938694 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:56 crc kubenswrapper[4611]: I0929 12:41:56.938706 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:56Z","lastTransitionTime":"2025-09-29T12:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.040929 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.040959 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.040967 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.040980 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.040988 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.143906 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.143959 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.143976 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.143999 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.144038 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.246717 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.246754 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.246764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.246779 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.246791 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.348876 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.348917 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.348927 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.348944 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.348954 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.450880 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.450910 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.450920 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.450934 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.450944 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.553524 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.553576 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.553591 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.553614 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.553649 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.655995 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.656077 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.656094 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.656118 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.656134 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.735753 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:57 crc kubenswrapper[4611]: E0929 12:41:57.735958 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.758418 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.758459 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.758469 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.758487 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.758499 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.860615 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.860676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.860687 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.860703 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.860714 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.963253 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.963308 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.963322 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.963341 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:57 crc kubenswrapper[4611]: I0929 12:41:57.963354 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:57Z","lastTransitionTime":"2025-09-29T12:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.065370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.065406 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.065417 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.065433 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.065444 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.167306 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.167384 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.167400 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.167414 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.167426 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.269389 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.269452 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.269463 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.269480 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.269489 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.372468 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.372534 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.372558 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.372588 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.372610 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.475294 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.475340 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.475357 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.475378 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.475395 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.578000 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.578049 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.578062 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.578078 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.578090 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.679568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.679776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.679803 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.679886 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.679913 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.735890 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.736334 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.736354 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:41:58 crc kubenswrapper[4611]: E0929 12:41:58.736389 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:41:58 crc kubenswrapper[4611]: E0929 12:41:58.736407 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:41:58 crc kubenswrapper[4611]: E0929 12:41:58.736469 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.782372 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.782404 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.782429 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.782444 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.782453 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.885121 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.885211 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.885307 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.885363 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.885380 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.988001 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.988037 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.988051 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.988067 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:58 crc kubenswrapper[4611]: I0929 12:41:58.988077 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:58Z","lastTransitionTime":"2025-09-29T12:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.090718 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.090764 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.090776 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.090795 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.090806 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.196043 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.196107 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.196124 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.196150 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.196171 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.298984 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.299031 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.299043 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.299062 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.299077 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.401875 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.401915 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.401926 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.401946 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.401956 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.505014 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.505092 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.505108 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.505158 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.505172 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.607906 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.607965 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.607982 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.608006 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.608022 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.710758 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.710803 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.710818 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.710839 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.710853 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.736101 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:41:59 crc kubenswrapper[4611]: E0929 12:41:59.736199 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.813211 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.813245 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.813254 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.813568 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.813595 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.916265 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.916804 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.916822 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.916836 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:41:59 crc kubenswrapper[4611]: I0929 12:41:59.916847 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:41:59Z","lastTransitionTime":"2025-09-29T12:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.021652 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.021697 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.021706 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.021724 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.021734 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.124834 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.125329 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.125513 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.125693 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.125827 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.228386 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.228429 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.228437 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.228451 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.228460 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.330691 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.330727 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.330735 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.330748 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.330757 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.433100 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.433147 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.433156 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.433174 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.433184 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.535339 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.535382 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.535392 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.535407 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.535416 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.637605 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.637660 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.637671 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.637686 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.637696 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.735232 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.735274 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:00 crc kubenswrapper[4611]: E0929 12:42:00.735370 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.735240 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.735988 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:42:00 crc kubenswrapper[4611]: E0929 12:42:00.736114 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p95nv_openshift-ovn-kubernetes(bfec2820-7242-4dd6-9fa5-4ebe161f99ba)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" Sep 29 12:42:00 crc kubenswrapper[4611]: E0929 12:42:00.736222 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:00 crc kubenswrapper[4611]: E0929 12:42:00.736266 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.739862 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.739883 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.739893 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.739906 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.739917 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.842002 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.842036 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.842046 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.842062 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.842072 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.945465 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.945550 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.945577 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.945610 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:00 crc kubenswrapper[4611]: I0929 12:42:00.945689 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:00Z","lastTransitionTime":"2025-09-29T12:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.047618 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.047676 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.047697 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.047751 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.047767 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.149828 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.149869 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.149880 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.149895 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.149906 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.252321 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.252360 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.252370 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.252385 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.252395 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.357149 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.357229 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.357248 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.357287 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.357312 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.460987 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.461028 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.461038 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.461055 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.461068 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.563286 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.563329 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.563344 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.563364 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.563380 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.665871 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.665919 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.665932 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.665948 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.665961 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.736818 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:01 crc kubenswrapper[4611]: E0929 12:42:01.736951 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.768003 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.768034 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.768042 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.768055 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.768064 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.838039 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.838069 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.838078 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.838091 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.838100 4611 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T12:42:01Z","lastTransitionTime":"2025-09-29T12:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.875656 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=89.875613813 podStartE2EDuration="1m29.875613813s" podCreationTimestamp="2025-09-29 12:40:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:41:54.070832149 +0000 UTC m=+100.962351755" watchObservedRunningTime="2025-09-29 12:42:01.875613813 +0000 UTC m=+108.767133419" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.876767 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"] Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.877229 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.878750 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.879248 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.880431 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.880444 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.909656 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.909739 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.909806 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.909825 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-service-ca\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:01 crc kubenswrapper[4611]: I0929 12:42:01.909852 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011048 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:02 crc 
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011118 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011172 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011208 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-service-ca\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011235 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011241 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.011319 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.012095 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-service-ca\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.019248 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.025953 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd"
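The volume entries for cluster-version-operator-5c965bbfc6-b5shd trace the kubelet volume manager's two-phase flow: each desired volume is first confirmed attached (VerifyControllerAttachedVolume), then mounted (MountVolume started, followed by MountVolume.SetUp succeeded). A toy Go sketch of that per-volume sequence; the types and helpers here are hypothetical stand-ins, not the real volume-manager API:

```go
// volumes.go: toy reconcile loop mirroring the verify-then-mount order seen
// in the log. verifyAttached and setUp are stubs standing in for the real
// attach check and mount/SetUp work.
package main

import "fmt"

type volume struct {
	uniqueName string // e.g. "kubernetes.io/secret/<pod-uid>-serving-cert"
	pod        string
}

func verifyAttached(v volume) error { return nil } // attaching is the controller's job
func setUp(v volume) error          { return nil } // mount, write contents, set perms

func reconcile(desired []volume) {
	for _, v := range desired {
		if err := verifyAttached(v); err != nil {
			fmt.Printf("verify failed for %q: %v\n", v.uniqueName, err)
			continue
		}
		fmt.Printf("MountVolume started for volume %q pod %q\n", v.uniqueName, v.pod)
		if err := setUp(v); err != nil {
			fmt.Printf("SetUp failed for %q: %v\n", v.uniqueName, err)
			continue
		}
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.uniqueName)
	}
}

func main() {
	reconcile([]volume{{
		uniqueName: "kubernetes.io/secret/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-serving-cert",
		pod:        "cluster-version-operator-5c965bbfc6-b5shd",
	}})
}
```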
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8f58a5f6-85cf-4825-8c78-7b0e71c1fd53-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b5shd\" (UID: \"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.192363 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.210003 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" event={"ID":"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53","Type":"ContainerStarted","Data":"7b35310d481400a5a35fdb077c077067d6aa4363e187d35fd6eb5928739bfe52"} Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.735574 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:02 crc kubenswrapper[4611]: E0929 12:42:02.735711 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.735779 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:02 crc kubenswrapper[4611]: I0929 12:42:02.735831 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:02 crc kubenswrapper[4611]: E0929 12:42:02.735900 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:02 crc kubenswrapper[4611]: E0929 12:42:02.735953 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:03 crc kubenswrapper[4611]: I0929 12:42:03.215067 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" event={"ID":"8f58a5f6-85cf-4825-8c78-7b0e71c1fd53","Type":"ContainerStarted","Data":"8a205385c0de6fbb3af4d0284cc40e19dd9d45d1330f48e47e0bb33691086115"} Sep 29 12:42:03 crc kubenswrapper[4611]: I0929 12:42:03.232042 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b5shd" podStartSLOduration=86.232023428 podStartE2EDuration="1m26.232023428s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:03.231144632 +0000 UTC m=+110.122664238" watchObservedRunningTime="2025-09-29 12:42:03.232023428 +0000 UTC m=+110.123543034" Sep 29 12:42:03 crc kubenswrapper[4611]: I0929 12:42:03.736214 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:03 crc kubenswrapper[4611]: E0929 12:42:03.737349 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:04 crc kubenswrapper[4611]: I0929 12:42:04.735642 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:04 crc kubenswrapper[4611]: I0929 12:42:04.735712 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:04 crc kubenswrapper[4611]: E0929 12:42:04.735766 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:04 crc kubenswrapper[4611]: E0929 12:42:04.735836 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:04 crc kubenswrapper[4611]: I0929 12:42:04.736089 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:04 crc kubenswrapper[4611]: E0929 12:42:04.736153 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:05 crc kubenswrapper[4611]: I0929 12:42:05.736037 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:05 crc kubenswrapper[4611]: E0929 12:42:05.736207 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:06 crc kubenswrapper[4611]: I0929 12:42:06.735259 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:06 crc kubenswrapper[4611]: I0929 12:42:06.735335 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:06 crc kubenswrapper[4611]: I0929 12:42:06.735290 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:06 crc kubenswrapper[4611]: E0929 12:42:06.735408 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:06 crc kubenswrapper[4611]: E0929 12:42:06.735584 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:06 crc kubenswrapper[4611]: E0929 12:42:06.735680 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:07 crc kubenswrapper[4611]: I0929 12:42:07.736157 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:07 crc kubenswrapper[4611]: E0929 12:42:07.736281 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:08 crc kubenswrapper[4611]: I0929 12:42:08.735754 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:08 crc kubenswrapper[4611]: E0929 12:42:08.735891 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:08 crc kubenswrapper[4611]: I0929 12:42:08.735764 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:08 crc kubenswrapper[4611]: I0929 12:42:08.735763 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:08 crc kubenswrapper[4611]: E0929 12:42:08.735969 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:08 crc kubenswrapper[4611]: E0929 12:42:08.736143 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:09 crc kubenswrapper[4611]: I0929 12:42:09.735425 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:09 crc kubenswrapper[4611]: E0929 12:42:09.735560 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:10 crc kubenswrapper[4611]: I0929 12:42:10.736266 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:10 crc kubenswrapper[4611]: I0929 12:42:10.736266 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:10 crc kubenswrapper[4611]: I0929 12:42:10.736434 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:10 crc kubenswrapper[4611]: E0929 12:42:10.736526 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:10 crc kubenswrapper[4611]: E0929 12:42:10.736663 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:10 crc kubenswrapper[4611]: E0929 12:42:10.736727 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:11 crc kubenswrapper[4611]: I0929 12:42:11.735782 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:11 crc kubenswrapper[4611]: E0929 12:42:11.736179 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:12 crc kubenswrapper[4611]: I0929 12:42:12.735238 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:12 crc kubenswrapper[4611]: E0929 12:42:12.735837 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:12 crc kubenswrapper[4611]: I0929 12:42:12.735270 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:12 crc kubenswrapper[4611]: I0929 12:42:12.735241 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:12 crc kubenswrapper[4611]: E0929 12:42:12.736054 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:12 crc kubenswrapper[4611]: E0929 12:42:12.736166 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.244316 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/1.log" Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.245260 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/0.log" Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.245333 4611 generic.go:334] "Generic (PLEG): container finished" podID="18731b4e-6360-4d87-b586-0a9dc6b5af1e" containerID="8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389" exitCode=1 Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.245373 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerDied","Data":"8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389"} Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.245417 4611 scope.go:117] "RemoveContainer" containerID="c8f499292f3b67e5be645d80bfbba8a44b6e12220592c5e6d4695e4c1c4bebc7" Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.245786 4611 scope.go:117] "RemoveContainer" containerID="8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389" Sep 29 12:42:13 crc kubenswrapper[4611]: E0929 12:42:13.245979 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-csch6_openshift-multus(18731b4e-6360-4d87-b586-0a9dc6b5af1e)\"" pod="openshift-multus/multus-csch6" podUID="18731b4e-6360-4d87-b586-0a9dc6b5af1e" Sep 29 12:42:13 crc kubenswrapper[4611]: I0929 12:42:13.735508 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:13 crc kubenswrapper[4611]: E0929 12:42:13.736866 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:13 crc kubenswrapper[4611]: E0929 12:42:13.754014 4611 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 12:42:13 crc kubenswrapper[4611]: E0929 12:42:13.831383 4611 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 12:42:14 crc kubenswrapper[4611]: I0929 12:42:14.249408 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/1.log" Sep 29 12:42:14 crc kubenswrapper[4611]: I0929 12:42:14.735792 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:14 crc kubenswrapper[4611]: I0929 12:42:14.735868 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:14 crc kubenswrapper[4611]: I0929 12:42:14.735886 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:14 crc kubenswrapper[4611]: E0929 12:42:14.735915 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:14 crc kubenswrapper[4611]: E0929 12:42:14.735950 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:14 crc kubenswrapper[4611]: E0929 12:42:14.736045 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:15 crc kubenswrapper[4611]: I0929 12:42:15.736926 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:15 crc kubenswrapper[4611]: E0929 12:42:15.737269 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:15 crc kubenswrapper[4611]: I0929 12:42:15.737677 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.256030 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/3.log" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.258293 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerStarted","Data":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.258676 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.285965 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podStartSLOduration=98.285950432 podStartE2EDuration="1m38.285950432s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:16.285611643 +0000 UTC m=+123.177131249" watchObservedRunningTime="2025-09-29 12:42:16.285950432 +0000 UTC m=+123.177470038" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.467750 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-xtjl8"] Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.467856 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:16 crc kubenswrapper[4611]: E0929 12:42:16.467956 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.735248 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.735296 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:16 crc kubenswrapper[4611]: I0929 12:42:16.735256 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:16 crc kubenswrapper[4611]: E0929 12:42:16.735375 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:16 crc kubenswrapper[4611]: E0929 12:42:16.735477 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:16 crc kubenswrapper[4611]: E0929 12:42:16.735551 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:17 crc kubenswrapper[4611]: I0929 12:42:17.736360 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:17 crc kubenswrapper[4611]: E0929 12:42:17.736503 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:18 crc kubenswrapper[4611]: I0929 12:42:18.735703 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:18 crc kubenswrapper[4611]: E0929 12:42:18.735850 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:18 crc kubenswrapper[4611]: I0929 12:42:18.735908 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:18 crc kubenswrapper[4611]: I0929 12:42:18.735935 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:18 crc kubenswrapper[4611]: E0929 12:42:18.736058 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:18 crc kubenswrapper[4611]: E0929 12:42:18.736155 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:18 crc kubenswrapper[4611]: E0929 12:42:18.832403 4611 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 12:42:19 crc kubenswrapper[4611]: I0929 12:42:19.735855 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:19 crc kubenswrapper[4611]: E0929 12:42:19.736294 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:20 crc kubenswrapper[4611]: I0929 12:42:20.735577 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:20 crc kubenswrapper[4611]: I0929 12:42:20.735714 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:20 crc kubenswrapper[4611]: E0929 12:42:20.736038 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:20 crc kubenswrapper[4611]: E0929 12:42:20.736140 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:20 crc kubenswrapper[4611]: I0929 12:42:20.735714 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:20 crc kubenswrapper[4611]: E0929 12:42:20.736201 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:21 crc kubenswrapper[4611]: I0929 12:42:21.735886 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:21 crc kubenswrapper[4611]: E0929 12:42:21.736079 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:22 crc kubenswrapper[4611]: I0929 12:42:22.736266 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:22 crc kubenswrapper[4611]: E0929 12:42:22.736438 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:22 crc kubenswrapper[4611]: I0929 12:42:22.736271 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:22 crc kubenswrapper[4611]: E0929 12:42:22.736725 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:22 crc kubenswrapper[4611]: I0929 12:42:22.736285 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:22 crc kubenswrapper[4611]: E0929 12:42:22.736866 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:23 crc kubenswrapper[4611]: I0929 12:42:23.735322 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:23 crc kubenswrapper[4611]: E0929 12:42:23.736343 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:23 crc kubenswrapper[4611]: E0929 12:42:23.832862 4611 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 12:42:24 crc kubenswrapper[4611]: I0929 12:42:24.736376 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:24 crc kubenswrapper[4611]: I0929 12:42:24.736453 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:24 crc kubenswrapper[4611]: E0929 12:42:24.736515 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:24 crc kubenswrapper[4611]: I0929 12:42:24.736397 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:24 crc kubenswrapper[4611]: E0929 12:42:24.736603 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:24 crc kubenswrapper[4611]: E0929 12:42:24.736705 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:24 crc kubenswrapper[4611]: I0929 12:42:24.736812 4611 scope.go:117] "RemoveContainer" containerID="8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389" Sep 29 12:42:25 crc kubenswrapper[4611]: I0929 12:42:25.286481 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/1.log" Sep 29 12:42:25 crc kubenswrapper[4611]: I0929 12:42:25.286545 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerStarted","Data":"3019c59039b6ddebfa387398ba37323b792ff4c1ac4de148cceb69288d0121fe"} Sep 29 12:42:25 crc kubenswrapper[4611]: I0929 12:42:25.735807 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:25 crc kubenswrapper[4611]: E0929 12:42:25.736042 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:26 crc kubenswrapper[4611]: I0929 12:42:26.735656 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:26 crc kubenswrapper[4611]: E0929 12:42:26.736214 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:26 crc kubenswrapper[4611]: I0929 12:42:26.735973 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:26 crc kubenswrapper[4611]: I0929 12:42:26.735732 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:26 crc kubenswrapper[4611]: E0929 12:42:26.736426 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:26 crc kubenswrapper[4611]: E0929 12:42:26.736526 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:27 crc kubenswrapper[4611]: I0929 12:42:27.735507 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:27 crc kubenswrapper[4611]: E0929 12:42:27.735729 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xtjl8" podUID="c2df08da-22ae-44b9-b568-06bafc65932c" Sep 29 12:42:28 crc kubenswrapper[4611]: I0929 12:42:28.735998 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:28 crc kubenswrapper[4611]: E0929 12:42:28.736143 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 12:42:28 crc kubenswrapper[4611]: I0929 12:42:28.736002 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:28 crc kubenswrapper[4611]: I0929 12:42:28.735999 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:28 crc kubenswrapper[4611]: E0929 12:42:28.736266 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 12:42:28 crc kubenswrapper[4611]: E0929 12:42:28.736344 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 12:42:29 crc kubenswrapper[4611]: I0929 12:42:29.736162 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:42:29 crc kubenswrapper[4611]: I0929 12:42:29.738249 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 12:42:29 crc kubenswrapper[4611]: I0929 12:42:29.738261 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.735959 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.736024 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.736518 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.738284 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.738323 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.738335 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 12:42:30 crc kubenswrapper[4611]: I0929 12:42:30.738284 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.899093 4611 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.940856 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p68fp"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.941408 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.941843 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.942399 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.942777 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g85mz"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.943893 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.944067 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.944650 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.945147 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.945411 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qght9"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.945573 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.945828 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.946346 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-kv8lp"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.948950 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.958902 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.959355 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.964255 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.964423 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.964812 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.964852 4611 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-cliconfig": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-cliconfig" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.964878 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-cliconfig\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-cliconfig\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.964926 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.965205 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.965345 4611 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-error": failed to list *v1.Secret: secrets "v4-0-config-user-template-error" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.965375 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-error\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-error\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 
12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.966210 4611 reflector.go:561] object-"openshift-authentication"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.966263 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.966369 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.966765 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.966972 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967202 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967230 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967285 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967381 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967523 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967639 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.967920 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.968139 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.968459 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.968557 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.968649 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.968739 4611 reflector.go:561] object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc": failed to list *v1.Secret: secrets "oauth-openshift-dockercfg-znhcc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.968760 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"oauth-openshift-dockercfg-znhcc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"oauth-openshift-dockercfg-znhcc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.968918 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.968924 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.969130 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.969935 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980031 4611 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-config": failed to list *v1.ConfigMap: configmaps "machine-approver-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980081 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-approver-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980142 4611 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4": failed to list *v1.Secret: secrets "machine-approver-sa-dockercfg-nl2j4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980157 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-sa-dockercfg-nl2j4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-sa-dockercfg-nl2j4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980197 4611 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-tls": failed to list *v1.Secret: secrets "machine-approver-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980209 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980256 4611 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980271 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list 
*v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980319 4611 reflector.go:561] object-"openshift-apiserver-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980334 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980383 4611 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert": failed to list *v1.Secret: secrets "openshift-apiserver-operator-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980399 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980448 4611 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv": failed to list *v1.Secret: secrets "openshift-apiserver-operator-dockercfg-xtcjv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980463 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-dockercfg-xtcjv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-dockercfg-xtcjv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980511 4611 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config": failed to list *v1.ConfigMap: configmaps "openshift-apiserver-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Sep 29 
12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980525 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-apiserver-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.980573 4611 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template": failed to list *v1.Secret: secrets "v4-0-config-system-ocp-branding-template" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.980588 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-ocp-branding-template\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-ocp-branding-template\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.980931 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-c7v9z"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.981433 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.983278 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.983617 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.983754 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.985193 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.988253 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.988310 4611 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.988356 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace 
\"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.990751 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.990840 4611 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.990871 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.990750 4611 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.990899 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: W0929 12:42:32.991050 4611 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Sep 29 12:42:32 crc kubenswrapper[4611]: E0929 12:42:32.991064 4611 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.991127 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.994332 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.995535 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 12:42:32 crc kubenswrapper[4611]: 
I0929 12:42:32.995742 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.996065 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hml26"] Sep 29 12:42:32 crc kubenswrapper[4611]: I0929 12:42:32.996546 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.000343 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.000581 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.000679 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.001595 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.002143 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.002401 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.002572 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.003473 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.003900 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.005422 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.006089 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.013274 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.015602 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.016980 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017051 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017096 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017180 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017278 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017349 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017389 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-6bg7b"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017417 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017484 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017566 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017646 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-jjrl6"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.017902 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-jjrl6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.018042 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.018138 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.018305 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.018850 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.025617 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.026116 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8hq4m"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.026554 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nt8jb"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.026929 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.027212 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.027362 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.042848 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.043044 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.043314 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044075 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-image-import-ca\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044107 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85b3a01b-0801-431c-aa43-2a0170aeb76f-serving-cert\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044131 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044148 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044175 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-config\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044195 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-audit\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044209 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-encryption-config\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044281 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz25h\" (UniqueName: \"kubernetes.io/projected/85b3a01b-0801-431c-aa43-2a0170aeb76f-kube-api-access-gz25h\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044298 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044319 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcv2b\" (UniqueName: \"kubernetes.io/projected/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-kube-api-access-kcv2b\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044341 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e5af436c-0542-4160-8a3b-418382623820-machine-approver-tls\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044367 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044389 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm4ck\" (UniqueName: \"kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044413 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvrvq\" (UniqueName: \"kubernetes.io/projected/14166aa7-7554-4165-9a14-f222a13d3c82-kube-api-access-dvrvq\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044436 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clbts\" (UniqueName: \"kubernetes.io/projected/e5af436c-0542-4160-8a3b-418382623820-kube-api-access-clbts\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044466 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044482 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhftn\" (UniqueName: \"kubernetes.io/projected/ca3bb61b-97fa-4e78-b84c-66b82267072d-kube-api-access-bhftn\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044498 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/85b3a01b-0801-431c-aa43-2a0170aeb76f-trusted-ca\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044516 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044531 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14166aa7-7554-4165-9a14-f222a13d3c82-config\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: 
\"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044546 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-auth-proxy-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044564 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044579 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044594 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044608 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-client-ca\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044639 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044653 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1387ce40-000f-4e95-adda-a347b7574779-serving-cert\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044668 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85b3a01b-0801-431c-aa43-2a0170aeb76f-config\") pod 
\"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044684 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044703 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-policies\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044724 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-dir\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044738 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044754 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ca3bb61b-97fa-4e78-b84c-66b82267072d-audit-dir\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044776 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml27m\" (UniqueName: \"kubernetes.io/projected/1387ce40-000f-4e95-adda-a347b7574779-kube-api-access-ml27m\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044790 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-etcd-client\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044804 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044821 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044838 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044852 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-etcd-serving-ca\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044864 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-serving-cert\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044879 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044893 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/14166aa7-7554-4165-9a14-f222a13d3c82-images\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044909 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ca3bb61b-97fa-4e78-b84c-66b82267072d-node-pullsecrets\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.044924 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-config\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.048927 
4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.049466 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.059217 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.059642 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.070535 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.070801 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.070949 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.071131 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.071544 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.094382 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.095122 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.095995 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.096451 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.105502 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.106912 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.107002 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.107448 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.111160 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.111994 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112119 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112240 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112336 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112418 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112507 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112599 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.112712 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.113834 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.114764 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.114913 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.115014 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.115123 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.115217 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.115236 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.115309 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.115435 4611 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.117262 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jmp7r"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.117812 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.118339 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.118944 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.123258 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.124027 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.136045 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.136522 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p68fp"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.136593 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.140975 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.141977 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.144873 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.145930 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.145965 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2434a6e-935b-4dde-ad5c-cbb8364c7034-audit-dir\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.145992 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146026 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9dzp\" (UniqueName: \"kubernetes.io/projected/fb819a41-a91e-439f-a34f-331f9731340a-kube-api-access-m9dzp\") pod \"downloads-7954f5f757-jjrl6\" (UID: \"fb819a41-a91e-439f-a34f-331f9731340a\") " pod="openshift-console/downloads-7954f5f757-jjrl6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146047 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0732fc85-8bf5-473f-ba34-50f375d7332d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146083 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-config\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146103 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-audit\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146125 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-encryption-config\") pod 
\"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146149 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcpgb\" (UniqueName: \"kubernetes.io/projected/c1afa714-1c99-43a8-ba3f-96af7f49abd5-kube-api-access-lcpgb\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146173 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz25h\" (UniqueName: \"kubernetes.io/projected/85b3a01b-0801-431c-aa43-2a0170aeb76f-kube-api-access-gz25h\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146196 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g76j\" (UniqueName: \"kubernetes.io/projected/0732fc85-8bf5-473f-ba34-50f375d7332d-kube-api-access-8g76j\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146222 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146244 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcv2b\" (UniqueName: \"kubernetes.io/projected/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-kube-api-access-kcv2b\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146266 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e5af436c-0542-4160-8a3b-418382623820-machine-approver-tls\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146290 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-serving-cert\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146312 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-audit-policies\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: 
\"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146338 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146360 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm4ck\" (UniqueName: \"kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146381 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d3dc824-e247-4a32-8870-95ee23ca6241-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146405 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvrvq\" (UniqueName: \"kubernetes.io/projected/14166aa7-7554-4165-9a14-f222a13d3c82-kube-api-access-dvrvq\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146427 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232ad623-50fd-4e92-ac89-cb548fbf140b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146449 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49949be7-0f3b-46cd-b895-079221aa632d-serving-cert\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146470 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqdqv\" (UniqueName: \"kubernetes.io/projected/cf70983e-cbf7-4c9d-ab94-2aceade70418-kube-api-access-jqdqv\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146494 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clbts\" (UniqueName: \"kubernetes.io/projected/e5af436c-0542-4160-8a3b-418382623820-kube-api-access-clbts\") pod 
\"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146515 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cf70983e-cbf7-4c9d-ab94-2aceade70418-available-featuregates\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146535 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0732fc85-8bf5-473f-ba34-50f375d7332d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146555 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-service-ca\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146588 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146614 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhftn\" (UniqueName: \"kubernetes.io/projected/ca3bb61b-97fa-4e78-b84c-66b82267072d-kube-api-access-bhftn\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146656 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/85b3a01b-0801-431c-aa43-2a0170aeb76f-trusted-ca\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146679 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-client-ca\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146701 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qght9\" (UID: 
\"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146723 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14166aa7-7554-4165-9a14-f222a13d3c82-config\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146744 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad4b3e69-ec86-4fba-bd07-af9f1532fed1-metrics-tls\") pod \"dns-operator-744455d44c-8hq4m\" (UID: \"ad4b3e69-ec86-4fba-bd07-af9f1532fed1\") " pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146767 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-auth-proxy-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146788 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146808 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146832 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146855 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-client-ca\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146878 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-trusted-ca-bundle\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 
12:42:33.146901 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146925 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1387ce40-000f-4e95-adda-a347b7574779-serving-cert\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146951 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85b3a01b-0801-431c-aa43-2a0170aeb76f-config\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146974 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxz8m\" (UniqueName: \"kubernetes.io/projected/ad4b3e69-ec86-4fba-bd07-af9f1532fed1-kube-api-access-bxz8m\") pod \"dns-operator-744455d44c-8hq4m\" (UID: \"ad4b3e69-ec86-4fba-bd07-af9f1532fed1\") " pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.146998 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d3dc824-e247-4a32-8870-95ee23ca6241-config\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147023 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147065 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147089 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/13efc77e-d80b-4d27-adc7-e93303568154-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2fct6\" (UID: \"13efc77e-d80b-4d27-adc7-e93303568154\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147124 4611 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-policies\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147148 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-config\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147183 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-dir\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147218 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147243 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-service-ca-bundle\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147271 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ca3bb61b-97fa-4e78-b84c-66b82267072d-audit-dir\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147294 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147318 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6blsv\" (UniqueName: \"kubernetes.io/projected/4348f944-d51c-4fdc-8789-646958d61216-kube-api-access-6blsv\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147345 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/232ad623-50fd-4e92-ac89-cb548fbf140b-serving-cert\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147368 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-config\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147390 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tlpz\" (UniqueName: \"kubernetes.io/projected/13efc77e-d80b-4d27-adc7-e93303568154-kube-api-access-2tlpz\") pod \"cluster-samples-operator-665b6dd947-2fct6\" (UID: \"13efc77e-d80b-4d27-adc7-e93303568154\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147422 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml27m\" (UniqueName: \"kubernetes.io/projected/1387ce40-000f-4e95-adda-a347b7574779-kube-api-access-ml27m\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147445 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5vkn\" (UniqueName: \"kubernetes.io/projected/49949be7-0f3b-46cd-b895-079221aa632d-kube-api-access-k5vkn\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147469 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147491 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-serving-cert\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147515 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-etcd-client\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147540 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147565 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147592 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.147614 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-etcd-serving-ca\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183749 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-serving-cert\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183797 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-config\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183822 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-etcd-client\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183847 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183872 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d3dc824-e247-4a32-8870-95ee23ca6241-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183894 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-oauth-config\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183918 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/14166aa7-7554-4165-9a14-f222a13d3c82-images\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183945 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ca3bb61b-97fa-4e78-b84c-66b82267072d-node-pullsecrets\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.183974 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-config\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184000 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf70983e-cbf7-4c9d-ab94-2aceade70418-serving-cert\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184022 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4348f944-d51c-4fdc-8789-646958d61216-serving-cert\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184043 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-encryption-config\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184065 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bxfg\" (UniqueName: \"kubernetes.io/projected/a2434a6e-935b-4dde-ad5c-cbb8364c7034-kube-api-access-4bxfg\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184088 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0732fc85-8bf5-473f-ba34-50f375d7332d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184115 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-image-import-ca\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184136 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85b3a01b-0801-431c-aa43-2a0170aeb76f-serving-cert\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184161 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-oauth-serving-cert\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184191 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/232ad623-50fd-4e92-ac89-cb548fbf140b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.184214 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.148410 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-etcd-serving-ca\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.149331 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.194637 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.195420 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8"] Sep 29 12:42:33 crc kubenswrapper[4611]: 
I0929 12:42:33.201995 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-ms74j"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.149427 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.150230 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14166aa7-7554-4165-9a14-f222a13d3c82-config\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.150295 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.150675 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-client-ca\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.151566 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.204345 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/14166aa7-7554-4165-9a14-f222a13d3c82-images\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.152308 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-config\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.204438 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ca3bb61b-97fa-4e78-b84c-66b82267072d-node-pullsecrets\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.152771 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-audit\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.205994 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.206546 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.207731 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-etcd-client\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.211753 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.212162 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-serving-cert\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.212259 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.212785 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.214393 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.214656 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.215102 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1387ce40-000f-4e95-adda-a347b7574779-serving-cert\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:33 crc kubenswrapper[4611]: 
I0929 12:42:33.215797 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.216112 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ca3bb61b-97fa-4e78-b84c-66b82267072d-encryption-config\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.216248 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-image-import-ca\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.169212 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85b3a01b-0801-431c-aa43-2a0170aeb76f-config\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.169748 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-policies\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.169796 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-dir\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.170373 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.170492 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ca3bb61b-97fa-4e78-b84c-66b82267072d-audit-dir\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.173449 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.216489 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.216596 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.216976 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3bb61b-97fa-4e78-b84c-66b82267072d-config\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.217068 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.217177 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.217510 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.151821 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.171429 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.171470 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.171505 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.177259 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.221524 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.228478 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.228712 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.228801 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.228843 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.228974 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85b3a01b-0801-431c-aa43-2a0170aeb76f-serving-cert\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.232027 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.233013 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.233398 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-h94vp"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.233946 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/85b3a01b-0801-431c-aa43-2a0170aeb76f-trusted-ca\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.234094 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.238238 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.238644 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rcs29"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.238650 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.238943 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.239047 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.239290 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.239423 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.240461 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.242066 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-c7v9z"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.246749 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.248664 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.248711 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.249791 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-2fgwq"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.250431 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.255746 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hml26"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.257669 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-7lv98"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.258365 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.259260 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-jjrl6"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.260721 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qght9"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.263876 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g85mz"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.263915 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8hq4m"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.266239 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.266958 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nt8jb"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.268744 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.271771 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.274551 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.277791 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jmp7r"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.280098 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.282488 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"] Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.284283 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285220 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tlpz\" (UniqueName: \"kubernetes.io/projected/13efc77e-d80b-4d27-adc7-e93303568154-kube-api-access-2tlpz\") pod \"cluster-samples-operator-665b6dd947-2fct6\" (UID: \"13efc77e-d80b-4d27-adc7-e93303568154\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285244 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-serving-cert\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285273 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-k5vkn\" (UniqueName: \"kubernetes.io/projected/49949be7-0f3b-46cd-b895-079221aa632d-kube-api-access-k5vkn\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285290 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285312 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-config\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285331 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d3dc824-e247-4a32-8870-95ee23ca6241-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285345 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-etcd-client\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285360 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-oauth-config\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285375 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-encryption-config\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285390 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bxfg\" (UniqueName: \"kubernetes.io/projected/a2434a6e-935b-4dde-ad5c-cbb8364c7034-kube-api-access-4bxfg\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285404 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0732fc85-8bf5-473f-ba34-50f375d7332d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: 
\"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285422 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf70983e-cbf7-4c9d-ab94-2aceade70418-serving-cert\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285437 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4348f944-d51c-4fdc-8789-646958d61216-serving-cert\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285452 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-oauth-serving-cert\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285467 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285483 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/232ad623-50fd-4e92-ac89-cb548fbf140b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285505 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2434a6e-935b-4dde-ad5c-cbb8364c7034-audit-dir\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285522 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9dzp\" (UniqueName: \"kubernetes.io/projected/fb819a41-a91e-439f-a34f-331f9731340a-kube-api-access-m9dzp\") pod \"downloads-7954f5f757-jjrl6\" (UID: \"fb819a41-a91e-439f-a34f-331f9731340a\") " pod="openshift-console/downloads-7954f5f757-jjrl6" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285538 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0732fc85-8bf5-473f-ba34-50f375d7332d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285668 
4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcpgb\" (UniqueName: \"kubernetes.io/projected/c1afa714-1c99-43a8-ba3f-96af7f49abd5-kube-api-access-lcpgb\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285693 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g76j\" (UniqueName: \"kubernetes.io/projected/0732fc85-8bf5-473f-ba34-50f375d7332d-kube-api-access-8g76j\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285728 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-serving-cert\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285757 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d3dc824-e247-4a32-8870-95ee23ca6241-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285773 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-audit-policies\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285797 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232ad623-50fd-4e92-ac89-cb548fbf140b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285811 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49949be7-0f3b-46cd-b895-079221aa632d-serving-cert\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285827 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqdqv\" (UniqueName: \"kubernetes.io/projected/cf70983e-cbf7-4c9d-ab94-2aceade70418-kube-api-access-jqdqv\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285842 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/cf70983e-cbf7-4c9d-ab94-2aceade70418-available-featuregates\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285858 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0732fc85-8bf5-473f-ba34-50f375d7332d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285884 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-client-ca\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285898 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-service-ca\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285921 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad4b3e69-ec86-4fba-bd07-af9f1532fed1-metrics-tls\") pod \"dns-operator-744455d44c-8hq4m\" (UID: \"ad4b3e69-ec86-4fba-bd07-af9f1532fed1\") " pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285947 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-trusted-ca-bundle\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285969 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxz8m\" (UniqueName: \"kubernetes.io/projected/ad4b3e69-ec86-4fba-bd07-af9f1532fed1-kube-api-access-bxz8m\") pod \"dns-operator-744455d44c-8hq4m\" (UID: \"ad4b3e69-ec86-4fba-bd07-af9f1532fed1\") " pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.285989 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d3dc824-e247-4a32-8870-95ee23ca6241-config\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286009 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" 
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286026 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/13efc77e-d80b-4d27-adc7-e93303568154-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2fct6\" (UID: \"13efc77e-d80b-4d27-adc7-e93303568154\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286057 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-service-ca-bundle\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286073 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-config\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286099 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286116 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6blsv\" (UniqueName: \"kubernetes.io/projected/4348f944-d51c-4fdc-8789-646958d61216-kube-api-access-6blsv\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286134 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-config\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.286153 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/232ad623-50fd-4e92-ac89-cb548fbf140b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.289564 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-serving-cert\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.289572 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-audit-policies\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.290818 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4348f944-d51c-4fdc-8789-646958d61216-serving-cert\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.290838 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-service-ca-bundle\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.290845 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cf70983e-cbf7-4c9d-ab94-2aceade70418-available-featuregates\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.291407 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2434a6e-935b-4dde-ad5c-cbb8364c7034-audit-dir\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.291607 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-config\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.291779 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-config\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.292530 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-trusted-ca-bundle\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.292827 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0732fc85-8bf5-473f-ba34-50f375d7332d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.294291 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-client-ca\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.294309 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-oauth-config\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.294703 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d3dc824-e247-4a32-8870-95ee23ca6241-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.295010 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.295998 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-oauth-serving-cert\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.296252 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0732fc85-8bf5-473f-ba34-50f375d7332d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.296313 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49949be7-0f3b-46cd-b895-079221aa632d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.297175 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-etcd-client\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.297504 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.302681 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/13efc77e-d80b-4d27-adc7-e93303568154-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2fct6\" (UID: \"13efc77e-d80b-4d27-adc7-e93303568154\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.303022 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-service-ca\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.303023 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d3dc824-e247-4a32-8870-95ee23ca6241-config\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.303339 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-config\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.303660 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-serving-cert\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.304064 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.306131 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf70983e-cbf7-4c9d-ab94-2aceade70418-serving-cert\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.308056 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.308113 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vbvkv"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.308521 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a2434a6e-935b-4dde-ad5c-cbb8364c7034-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.309427 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.311112 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-smw45"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.311670 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-smw45"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.311963 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49949be7-0f3b-46cd-b895-079221aa632d-serving-cert\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.317025 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a2434a6e-935b-4dde-ad5c-cbb8364c7034-encryption-config\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.319147 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.329016 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.330782 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.332730 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.334565 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-kv8lp"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.336216 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.339139 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/232ad623-50fd-4e92-ac89-cb548fbf140b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.339190 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.341781 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rcs29"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.344458 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.344858 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vbvkv"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.345610 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-h94vp"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.348510 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.351781 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-2fgwq"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.362026 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad4b3e69-ec86-4fba-bd07-af9f1532fed1-metrics-tls\") pod \"dns-operator-744455d44c-8hq4m\" (UID: \"ad4b3e69-ec86-4fba-bd07-af9f1532fed1\") " pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.363015 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.365448 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.367435 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.370569 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232ad623-50fd-4e92-ac89-cb548fbf140b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.373053 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.376653 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.378129 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.380018 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-smw45"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.381176 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.382436 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-ms74j"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.383305 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.383615 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.384633 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr"]
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.403904 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.424037 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.443787 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.465793 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.483903 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.523581 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.544242 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.562930 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.583423 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.604407 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.623222 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.643689 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.684168 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.704088 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.724792 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.743650 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.763416 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.783581 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.803911 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.823390 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.843271 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.864052 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.884540 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.903953 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.923338 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.943558 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.969865 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Sep 29 12:42:33 crc kubenswrapper[4611]: I0929 12:42:33.984268 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.004225 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.024560 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.044015 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.069215 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.084865 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.103544 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.124895 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.144902 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150514 4611 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150641 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-config podName:8ed60ce8-626d-408f-8e0e-7b1e1855c47d nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.650598635 +0000 UTC m=+141.542118241 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-config") pod "openshift-apiserver-operator-796bbdcf4f-m5xtn" (UID: "8ed60ce8-626d-408f-8e0e-7b1e1855c47d") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150527 4611 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150758 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-serving-cert podName:8ed60ce8-626d-408f-8e0e-7b1e1855c47d nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.650744269 +0000 UTC m=+141.542263875 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-m5xtn" (UID: "8ed60ce8-626d-408f-8e0e-7b1e1855c47d") : failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150541 4611 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150818 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls podName:14166aa7-7554-4165-9a14-f222a13d3c82 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.65079036 +0000 UTC m=+141.542309966 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-kv8lp" (UID: "14166aa7-7554-4165-9a14-f222a13d3c82") : failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150539 4611 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.150856 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-auth-proxy-config podName:e5af436c-0542-4160-8a3b-418382623820 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.650846852 +0000 UTC m=+141.542366458 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "auth-proxy-config" (UniqueName: "kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-auth-proxy-config") pod "machine-approver-56656f9798-b9wt8" (UID: "e5af436c-0542-4160-8a3b-418382623820") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.153757 4611 secret.go:188] Couldn't get secret openshift-cluster-machine-approver/machine-approver-tls: failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.153863 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5af436c-0542-4160-8a3b-418382623820-machine-approver-tls podName:e5af436c-0542-4160-8a3b-418382623820 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.653827024 +0000 UTC m=+141.545346710 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-approver-tls" (UniqueName: "kubernetes.io/secret/e5af436c-0542-4160-8a3b-418382623820-machine-approver-tls") pod "machine-approver-56656f9798-b9wt8" (UID: "e5af436c-0542-4160-8a3b-418382623820") : failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.162336 4611 request.go:700] Waited for 1.009169186s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console-operator/serviceaccounts/console-operator/token
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.162584 4611 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-error: failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.163128 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error podName:f5f3de88-84fb-42b9-953f-74f002a8af28 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.662646628 +0000 UTC m=+141.554166234 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-error" (UniqueName: "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error") pod "oauth-openshift-558db77b4-qght9" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28") : failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.174812 4611 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-ocp-branding-template: failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.174851 4611 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-cliconfig: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.174909 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template podName:f5f3de88-84fb-42b9-953f-74f002a8af28 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.674886496 +0000 UTC m=+141.566406102 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-ocp-branding-template" (UniqueName: "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template") pod "oauth-openshift-558db77b4-qght9" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28") : failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.175003 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig podName:f5f3de88-84fb-42b9-953f-74f002a8af28 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.674974068 +0000 UTC m=+141.566493714 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-cliconfig" (UniqueName: "kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig") pod "oauth-openshift-558db77b4-qght9" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.176641 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.180341 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz25h\" (UniqueName: \"kubernetes.io/projected/85b3a01b-0801-431c-aa43-2a0170aeb76f-kube-api-access-gz25h\") pod \"console-operator-58897d9998-p68fp\" (UID: \"85b3a01b-0801-431c-aa43-2a0170aeb76f\") " pod="openshift-console-operator/console-operator-58897d9998-p68fp"
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.203952 4611 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/machine-approver-config: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: E0929 12:42:34.204023 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-config podName:e5af436c-0542-4160-8a3b-418382623820 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:34.70400813 +0000 UTC m=+141.595527736 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-config") pod "machine-approver-56656f9798-b9wt8" (UID: "e5af436c-0542-4160-8a3b-418382623820") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.262082 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhftn\" (UniqueName: \"kubernetes.io/projected/ca3bb61b-97fa-4e78-b84c-66b82267072d-kube-api-access-bhftn\") pod \"apiserver-76f77b778f-g85mz\" (UID: \"ca3bb61b-97fa-4e78-b84c-66b82267072d\") " pod="openshift-apiserver/apiserver-76f77b778f-g85mz"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.288104 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml27m\" (UniqueName: \"kubernetes.io/projected/1387ce40-000f-4e95-adda-a347b7574779-kube-api-access-ml27m\") pod \"route-controller-manager-6576b87f9c-qf59l\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.303769 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.323287 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.344045 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.363722 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.383385 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.405048 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.424278 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.443408 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.464351 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.466382 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-p68fp"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.469393 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.483796 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.486206 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g85mz"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.504527 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.524264 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.543473 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.566790 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.584316 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.604227 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.624937 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.644183 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.661143 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"]
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.664757 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.683078 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.698438 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g85mz"]
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705671 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-auth-proxy-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705705 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705723 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705760 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705813 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705834 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705867 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705917 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e5af436c-0542-4160-8a3b-418382623820-machine-approver-tls\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.705937 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.707054 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.723957 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.751438 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.764504 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.784096 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.804122 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.827268 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.844230 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.851573 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p68fp"]
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.863357 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 12:42:34 crc kubenswrapper[4611]: W0929 12:42:34.864980 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85b3a01b_0801_431c_aa43_2a0170aeb76f.slice/crio-27cf004bedf5b8a72d72f6db5a9303145f21f3077be83b02f4ab96974797238e WatchSource:0}: Error finding container 27cf004bedf5b8a72d72f6db5a9303145f21f3077be83b02f4ab96974797238e: Status 404 returned error can't find the container with id 27cf004bedf5b8a72d72f6db5a9303145f21f3077be83b02f4ab96974797238e
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.883706 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.903988 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.923662 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.944374 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.964641 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Sep 29 12:42:34 crc kubenswrapper[4611]: I0929 12:42:34.983892 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.021782 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tlpz\" (UniqueName: \"kubernetes.io/projected/13efc77e-d80b-4d27-adc7-e93303568154-kube-api-access-2tlpz\") pod \"cluster-samples-operator-665b6dd947-2fct6\" (UID: \"13efc77e-d80b-4d27-adc7-e93303568154\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.036993 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d3dc824-e247-4a32-8870-95ee23ca6241-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgnqs\" (UID: \"7d3dc824-e247-4a32-8870-95ee23ca6241\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.059194 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5vkn\" (UniqueName: \"kubernetes.io/projected/49949be7-0f3b-46cd-b895-079221aa632d-kube-api-access-k5vkn\") pod \"authentication-operator-69f744f599-4fxjt\" (UID: \"49949be7-0f3b-46cd-b895-079221aa632d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.066726 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.075979 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0732fc85-8bf5-473f-ba34-50f375d7332d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.096886 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqdqv\" (UniqueName: \"kubernetes.io/projected/cf70983e-cbf7-4c9d-ab94-2aceade70418-kube-api-access-jqdqv\") pod \"openshift-config-operator-7777fb866f-t4mbb\" (UID: \"cf70983e-cbf7-4c9d-ab94-2aceade70418\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.119487 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/232ad623-50fd-4e92-ac89-cb548fbf140b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-46fsb\" (UID: \"232ad623-50fd-4e92-ac89-cb548fbf140b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.141919 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9dzp\" (UniqueName: \"kubernetes.io/projected/fb819a41-a91e-439f-a34f-331f9731340a-kube-api-access-m9dzp\") pod \"downloads-7954f5f757-jjrl6\" (UID: \"fb819a41-a91e-439f-a34f-331f9731340a\") " pod="openshift-console/downloads-7954f5f757-jjrl6"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.165237 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxz8m\" (UniqueName: \"kubernetes.io/projected/ad4b3e69-ec86-4fba-bd07-af9f1532fed1-kube-api-access-bxz8m\") pod \"dns-operator-744455d44c-8hq4m\" (UID: \"ad4b3e69-ec86-4fba-bd07-af9f1532fed1\") " pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.167826 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.182595 4611 request.go:700] Waited for 1.890369118s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa/token
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.184178 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.185706 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcpgb\" (UniqueName: \"kubernetes.io/projected/c1afa714-1c99-43a8-ba3f-96af7f49abd5-kube-api-access-lcpgb\") pod \"console-f9d7485db-c7v9z\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.198341 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.198349 4611 projected.go:288] Couldn't get configMap openshift-apiserver-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.198396 4611 projected.go:194] Error preparing data for projected volume kube-api-access-kcv2b for pod openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.198452 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-kube-api-access-kcv2b podName:8ed60ce8-626d-408f-8e0e-7b1e1855c47d nodeName:}" failed. No retries permitted until 2025-09-29 12:42:35.69843494 +0000 UTC m=+142.589954546 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-kcv2b" (UniqueName: "kubernetes.io/projected/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-kube-api-access-kcv2b") pod "openshift-apiserver-operator-796bbdcf4f-m5xtn" (UID: "8ed60ce8-626d-408f-8e0e-7b1e1855c47d") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.203680 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6blsv\" (UniqueName: \"kubernetes.io/projected/4348f944-d51c-4fdc-8789-646958d61216-kube-api-access-6blsv\") pod \"controller-manager-879f6c89f-hml26\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.216949 4611 projected.go:288] Couldn't get configMap openshift-authentication/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.216982 4611 projected.go:194] Error preparing data for projected volume kube-api-access-cm4ck for pod openshift-authentication/oauth-openshift-558db77b4-qght9: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.217060 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck podName:f5f3de88-84fb-42b9-953f-74f002a8af28 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:35.717040064 +0000 UTC m=+142.608559670 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cm4ck" (UniqueName: "kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck") pod "oauth-openshift-558db77b4-qght9" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.219253 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-c7v9z"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.220361 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g76j\" (UniqueName: \"kubernetes.io/projected/0732fc85-8bf5-473f-ba34-50f375d7332d-kube-api-access-8g76j\") pod \"cluster-image-registry-operator-dc59b4c8b-hffln\" (UID: \"0732fc85-8bf5-473f-ba34-50f375d7332d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.235291 4611 projected.go:288] Couldn't get configMap openshift-cluster-machine-approver/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.235348 4611 projected.go:194] Error preparing data for projected volume kube-api-access-clbts for pod openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.235409 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e5af436c-0542-4160-8a3b-418382623820-kube-api-access-clbts podName:e5af436c-0542-4160-8a3b-418382623820 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:35.735391071 +0000 UTC m=+142.626910677 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-clbts" (UniqueName: "kubernetes.io/projected/e5af436c-0542-4160-8a3b-418382623820-kube-api-access-clbts") pod "machine-approver-56656f9798-b9wt8" (UID: "e5af436c-0542-4160-8a3b-418382623820") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.238138 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bxfg\" (UniqueName: \"kubernetes.io/projected/a2434a6e-935b-4dde-ad5c-cbb8364c7034-kube-api-access-4bxfg\") pod \"apiserver-7bbb656c7d-7q4jc\" (UID: \"a2434a6e-935b-4dde-ad5c-cbb8364c7034\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.243323 4611 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.247563 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.254210 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs"]
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.264988 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.284956 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.299207 4611 projected.go:288] Couldn't get configMap openshift-machine-api/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.299227 4611 projected.go:194] Error preparing data for projected volume kube-api-access-dvrvq for pod openshift-machine-api/machine-api-operator-5694c8668f-kv8lp: failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.299272 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/14166aa7-7554-4165-9a14-f222a13d3c82-kube-api-access-dvrvq podName:14166aa7-7554-4165-9a14-f222a13d3c82 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:35.799255945 +0000 UTC m=+142.690775551 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-dvrvq" (UniqueName: "kubernetes.io/projected/14166aa7-7554-4165-9a14-f222a13d3c82-kube-api-access-dvrvq") pod "machine-api-operator-5694c8668f-kv8lp" (UID: "14166aa7-7554-4165-9a14-f222a13d3c82") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.304045 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.324819 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.341439 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.348056 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.348848 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.353145 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" event={"ID":"7d3dc824-e247-4a32-8870-95ee23ca6241","Type":"ContainerStarted","Data":"e5ca73f49debfde6993a98ca8ad1342b50f7496cdcdc9290f6e4e3e7679b633d"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.355953 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-jjrl6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.356506 4611 generic.go:334] "Generic (PLEG): container finished" podID="ca3bb61b-97fa-4e78-b84c-66b82267072d" containerID="6c270c33b6a17289c636b88814eac72047be1aa3c6f01e7ce6d3ccdc1f844d4c" exitCode=0 Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.357865 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" event={"ID":"ca3bb61b-97fa-4e78-b84c-66b82267072d","Type":"ContainerDied","Data":"6c270c33b6a17289c636b88814eac72047be1aa3c6f01e7ce6d3ccdc1f844d4c"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.357893 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" event={"ID":"ca3bb61b-97fa-4e78-b84c-66b82267072d","Type":"ContainerStarted","Data":"1efaed85e3b2d8fb9758ba58d6848c0e0feead9266dd02ff8cdd743b8b48beb1"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.365043 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.365327 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-p68fp" event={"ID":"85b3a01b-0801-431c-aa43-2a0170aeb76f","Type":"ContainerStarted","Data":"ea8cba5d72c9550ac7a2b4258b7796f675d36729364d47408e005a427e840849"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.365367 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-p68fp" event={"ID":"85b3a01b-0801-431c-aa43-2a0170aeb76f","Type":"ContainerStarted","Data":"27cf004bedf5b8a72d72f6db5a9303145f21f3077be83b02f4ab96974797238e"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.365951 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.368018 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" event={"ID":"1387ce40-000f-4e95-adda-a347b7574779","Type":"ContainerStarted","Data":"9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.368043 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" 
event={"ID":"1387ce40-000f-4e95-adda-a347b7574779","Type":"ContainerStarted","Data":"6f488137c416ba8974e9b5a6aa919809649a580fae90a9d7d82cd8606d975def"} Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.368441 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.368665 4611 patch_prober.go:28] interesting pod/console-operator-58897d9998-p68fp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/readyz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.368697 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-p68fp" podUID="85b3a01b-0801-431c-aa43-2a0170aeb76f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.5:8443/readyz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.383230 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.393967 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.417887 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-tls\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.417982 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-default-certificate\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.418011 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-certificates\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.418049 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e598e87c-ea30-47db-9f40-1155b730e8d4-config\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.418078 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.418127 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e598e87c-ea30-47db-9f40-1155b730e8d4-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.421752 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e598e87c-ea30-47db-9f40-1155b730e8d4-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.421797 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmp6s\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-kube-api-access-cmp6s\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.421832 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-metrics-certs\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.421877 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d1ca0ac6-c50d-41d8-964e-5c065edd7197-service-ca-bundle\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.421956 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.421997 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxdhb\" (UniqueName: \"kubernetes.io/projected/d1ca0ac6-c50d-41d8-964e-5c065edd7197-kube-api-access-rxdhb\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.422036 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-stats-auth\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.422102 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-trusted-ca\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.422119 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.422159 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-bound-sa-token\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.422577 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:35.922562741 +0000 UTC m=+142.814082347 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.424811 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.437553 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb"] Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.443436 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.472595 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.483208 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.486988 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.504554 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.511358 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.527494 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.527808 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-proxy-tls\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.527843 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkcfc\" (UniqueName: 
\"kubernetes.io/projected/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-kube-api-access-gkcfc\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.527982 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-csi-data-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.528001 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwlgb\" (UniqueName: \"kubernetes.io/projected/947d6530-c9b4-45cb-9f52-d83f26bda55a-kube-api-access-wwlgb\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.528212 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27knm\" (UniqueName: \"kubernetes.io/projected/b5166b57-5645-49e8-870c-108f64403797-kube-api-access-27knm\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.528234 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhm78\" (UniqueName: \"kubernetes.io/projected/615f5d32-538b-4c6a-8bb9-57eedec2a126-kube-api-access-vhm78\") pod \"package-server-manager-789f6589d5-nk4d7\" (UID: \"615f5d32-538b-4c6a-8bb9-57eedec2a126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.528465 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-metrics-certs\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.528486 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0eececdb-2bc0-45ec-88e3-7b64e2043876-auth-proxy-config\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.529754 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-config\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.530966 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-29 12:42:36.030947215 +0000 UTC m=+142.922466821 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.530992 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d1ca0ac6-c50d-41d8-964e-5c065edd7197-service-ca-bundle\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531010 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-socket-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531040 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5qb5\" (UniqueName: \"kubernetes.io/projected/0842824a-309a-4133-a66d-c393cf09be7a-kube-api-access-d5qb5\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531163 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b5166b57-5645-49e8-870c-108f64403797-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531196 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjtfk\" (UniqueName: \"kubernetes.io/projected/00b46678-03aa-4e70-af99-c622eb2b2508-kube-api-access-jjtfk\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531213 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2aa1ce01-d73b-48a2-bc62-b361f8861131-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531232 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk6kc\" (UniqueName: \"kubernetes.io/projected/7a5592e9-b443-4f48-b970-8e7b52c722f0-kube-api-access-gk6kc\") pod \"multus-admission-controller-857f4d67dd-ms74j\" 
(UID: \"7a5592e9-b443-4f48-b970-8e7b52c722f0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531253 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxdhb\" (UniqueName: \"kubernetes.io/projected/d1ca0ac6-c50d-41d8-964e-5c065edd7197-kube-api-access-rxdhb\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531328 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq87s\" (UniqueName: \"kubernetes.io/projected/2aa1ce01-d73b-48a2-bc62-b361f8861131-kube-api-access-tq87s\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531346 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c590f65-36ab-4f95-92c7-b70cc303b448-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531373 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-249f6\" (UniqueName: \"kubernetes.io/projected/0eececdb-2bc0-45ec-88e3-7b64e2043876-kube-api-access-249f6\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531393 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531417 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-metrics-tls\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531454 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-stats-auth\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531471 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/651fdc30-c679-4d23-a889-2cf5e377afb1-webhook-cert\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531555 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpxp2\" (UniqueName: \"kubernetes.io/projected/c758f379-2019-4a2c-9ee3-12030ae2f85b-kube-api-access-bpxp2\") pod \"ingress-canary-smw45\" (UID: \"c758f379-2019-4a2c-9ee3-12030ae2f85b\") " pod="openshift-ingress-canary/ingress-canary-smw45" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531570 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/651fdc30-c679-4d23-a889-2cf5e377afb1-apiservice-cert\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531611 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/651fdc30-c679-4d23-a889-2cf5e377afb1-tmpfs\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531642 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-trusted-ca\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531663 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531678 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-bound-sa-token\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531713 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-tls\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531765 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-default-certificate\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531784 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7471b65-3a4e-461a-b10d-43c2b0715b36-serving-cert\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531812 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ed1460d-346f-4749-8a8d-107744d6b4a3-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wwvsv\" (UID: \"7ed1460d-346f-4749-8a8d-107744d6b4a3\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531828 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-config-volume\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531845 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbgt6\" (UniqueName: \"kubernetes.io/projected/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-kube-api-access-jbgt6\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531862 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-certificates\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531888 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/40660608-6535-4d31-b8af-564e51dae5c8-srv-cert\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-service-ca\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531956 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e598e87c-ea30-47db-9f40-1155b730e8d4-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.531972 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0842824a-309a-4133-a66d-c393cf09be7a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532179 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5755d\" (UniqueName: \"kubernetes.io/projected/651fdc30-c679-4d23-a889-2cf5e377afb1-kube-api-access-5755d\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532197 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e598e87c-ea30-47db-9f40-1155b730e8d4-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532214 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0eececdb-2bc0-45ec-88e3-7b64e2043876-images\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532228 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0842824a-309a-4133-a66d-c393cf09be7a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532243 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7j9r\" (UniqueName: \"kubernetes.io/projected/03b1269f-c666-43fc-b39b-30a0174f3a38-kube-api-access-x7j9r\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532306 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532341 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0eececdb-2bc0-45ec-88e3-7b64e2043876-proxy-tls\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532355 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c758f379-2019-4a2c-9ee3-12030ae2f85b-cert\") pod \"ingress-canary-smw45\" (UID: \"c758f379-2019-4a2c-9ee3-12030ae2f85b\") " pod="openshift-ingress-canary/ingress-canary-smw45" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532370 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/615f5d32-538b-4c6a-8bb9-57eedec2a126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nk4d7\" (UID: \"615f5d32-538b-4c6a-8bb9-57eedec2a126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532387 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7471b65-3a4e-461a-b10d-43c2b0715b36-config\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532407 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmp6s\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-kube-api-access-cmp6s\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532428 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/be6d0117-b4cb-4cf3-8ecf-7add7e040587-certs\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532466 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-mountpoint-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532480 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-registration-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532518 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-plugins-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532552 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl887\" (UniqueName: 
\"kubernetes.io/projected/4c590f65-36ab-4f95-92c7-b70cc303b448-kube-api-access-rl887\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532567 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d33d2c15-5789-4553-b00c-b9c2d5f332a2-secret-volume\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532582 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2aa1ce01-d73b-48a2-bc62-b361f8861131-srv-cert\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532605 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm8kr\" (UniqueName: \"kubernetes.io/projected/40660608-6535-4d31-b8af-564e51dae5c8-kube-api-access-mm8kr\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532640 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532658 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/be6d0117-b4cb-4cf3-8ecf-7add7e040587-node-bootstrap-token\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.532756 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.534708 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d1ca0ac6-c50d-41d8-964e-5c065edd7197-service-ca-bundle\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.541520 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4fxjt"] Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.543370 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.544772 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-trusted-ca\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.545319 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-certificates\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.545667 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sph75\" (UniqueName: \"kubernetes.io/projected/501f1b34-13aa-4b8c-8fac-d525ad0626b7-kube-api-access-sph75\") pod \"migrator-59844c95c7-lxmhx\" (UID: \"501f1b34-13aa-4b8c-8fac-d525ad0626b7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.546058 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.546104 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/40660608-6535-4d31-b8af-564e51dae5c8-profile-collector-cert\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.546223 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc9ck\" (UniqueName: \"kubernetes.io/projected/f7471b65-3a4e-461a-b10d-43c2b0715b36-kube-api-access-qc9ck\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.546403 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c590f65-36ab-4f95-92c7-b70cc303b448-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.546451 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbw9s\" 
(UniqueName: \"kubernetes.io/projected/d33d2c15-5789-4553-b00c-b9c2d5f332a2-kube-api-access-jbw9s\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.546927 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.046914866 +0000 UTC m=+142.938434472 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.548864 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/03b1269f-c666-43fc-b39b-30a0174f3a38-signing-cabundle\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.548889 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-ca\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.548929 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdss2\" (UniqueName: \"kubernetes.io/projected/7ed1460d-346f-4749-8a8d-107744d6b4a3-kube-api-access-xdss2\") pod \"control-plane-machine-set-operator-78cbb6b69f-wwvsv\" (UID: \"7ed1460d-346f-4749-8a8d-107744d6b4a3\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.548962 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a5592e9-b443-4f48-b970-8e7b52c722f0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-ms74j\" (UID: \"7a5592e9-b443-4f48-b970-8e7b52c722f0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549030 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-serving-cert\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549106 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b5166b57-5645-49e8-870c-108f64403797-trusted-ca\") pod 
\"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549290 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/03b1269f-c666-43fc-b39b-30a0174f3a38-signing-key\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549326 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-client\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549373 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d33d2c15-5789-4553-b00c-b9c2d5f332a2-config-volume\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549425 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e598e87c-ea30-47db-9f40-1155b730e8d4-config\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549527 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44vdd\" (UniqueName: \"kubernetes.io/projected/be6d0117-b4cb-4cf3-8ecf-7add7e040587-kube-api-access-44vdd\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549593 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549652 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5166b57-5645-49e8-870c-108f64403797-metrics-tls\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549672 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69kp7\" (UniqueName: \"kubernetes.io/projected/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-kube-api-access-69kp7\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.549863 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.554523 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-default-certificate\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.555127 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e598e87c-ea30-47db-9f40-1155b730e8d4-config\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.555566 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-tls\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.557083 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e5af436c-0542-4160-8a3b-418382623820-auth-proxy-config\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.560195 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-stats-auth\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.561554 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.562097 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d1ca0ac6-c50d-41d8-964e-5c065edd7197-metrics-certs\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.562476 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e598e87c-ea30-47db-9f40-1155b730e8d4-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:35 crc 
kubenswrapper[4611]: I0929 12:42:35.565018 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.570294 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e5af436c-0542-4160-8a3b-418382623820-machine-approver-tls\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.588318 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.589337 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.600242 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-c7v9z"] Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.610722 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.618355 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.624532 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.644122 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.655953 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656119 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-plugins-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656152 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl887\" (UniqueName: \"kubernetes.io/projected/4c590f65-36ab-4f95-92c7-b70cc303b448-kube-api-access-rl887\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: 
\"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656171 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d33d2c15-5789-4553-b00c-b9c2d5f332a2-secret-volume\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656188 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2aa1ce01-d73b-48a2-bc62-b361f8861131-srv-cert\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656206 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm8kr\" (UniqueName: \"kubernetes.io/projected/40660608-6535-4d31-b8af-564e51dae5c8-kube-api-access-mm8kr\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656222 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656246 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/be6d0117-b4cb-4cf3-8ecf-7add7e040587-node-bootstrap-token\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656278 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sph75\" (UniqueName: \"kubernetes.io/projected/501f1b34-13aa-4b8c-8fac-d525ad0626b7-kube-api-access-sph75\") pod \"migrator-59844c95c7-lxmhx\" (UID: \"501f1b34-13aa-4b8c-8fac-d525ad0626b7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656312 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/40660608-6535-4d31-b8af-564e51dae5c8-profile-collector-cert\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656332 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc9ck\" (UniqueName: \"kubernetes.io/projected/f7471b65-3a4e-461a-b10d-43c2b0715b36-kube-api-access-qc9ck\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656349 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c590f65-36ab-4f95-92c7-b70cc303b448-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656364 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbw9s\" (UniqueName: \"kubernetes.io/projected/d33d2c15-5789-4553-b00c-b9c2d5f332a2-kube-api-access-jbw9s\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/03b1269f-c666-43fc-b39b-30a0174f3a38-signing-cabundle\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656395 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-ca\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656414 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdss2\" (UniqueName: \"kubernetes.io/projected/7ed1460d-346f-4749-8a8d-107744d6b4a3-kube-api-access-xdss2\") pod \"control-plane-machine-set-operator-78cbb6b69f-wwvsv\" (UID: \"7ed1460d-346f-4749-8a8d-107744d6b4a3\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656432 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a5592e9-b443-4f48-b970-8e7b52c722f0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-ms74j\" (UID: \"7a5592e9-b443-4f48-b970-8e7b52c722f0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656453 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-serving-cert\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656470 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b5166b57-5645-49e8-870c-108f64403797-trusted-ca\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656497 4611 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/03b1269f-c666-43fc-b39b-30a0174f3a38-signing-key\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656512 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-client\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656530 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d33d2c15-5789-4553-b00c-b9c2d5f332a2-config-volume\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656548 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44vdd\" (UniqueName: \"kubernetes.io/projected/be6d0117-b4cb-4cf3-8ecf-7add7e040587-kube-api-access-44vdd\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656566 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5166b57-5645-49e8-870c-108f64403797-metrics-tls\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656581 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69kp7\" (UniqueName: \"kubernetes.io/projected/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-kube-api-access-69kp7\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656601 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-proxy-tls\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656617 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkcfc\" (UniqueName: \"kubernetes.io/projected/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-kube-api-access-gkcfc\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656654 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-csi-data-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: 
\"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656668 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwlgb\" (UniqueName: \"kubernetes.io/projected/947d6530-c9b4-45cb-9f52-d83f26bda55a-kube-api-access-wwlgb\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656685 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27knm\" (UniqueName: \"kubernetes.io/projected/b5166b57-5645-49e8-870c-108f64403797-kube-api-access-27knm\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656700 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhm78\" (UniqueName: \"kubernetes.io/projected/615f5d32-538b-4c6a-8bb9-57eedec2a126-kube-api-access-vhm78\") pod \"package-server-manager-789f6589d5-nk4d7\" (UID: \"615f5d32-538b-4c6a-8bb9-57eedec2a126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656718 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0eececdb-2bc0-45ec-88e3-7b64e2043876-auth-proxy-config\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656735 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-config\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656753 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5qb5\" (UniqueName: \"kubernetes.io/projected/0842824a-309a-4133-a66d-c393cf09be7a-kube-api-access-d5qb5\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656767 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-socket-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656785 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b5166b57-5645-49e8-870c-108f64403797-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 
12:42:35.656805 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjtfk\" (UniqueName: \"kubernetes.io/projected/00b46678-03aa-4e70-af99-c622eb2b2508-kube-api-access-jjtfk\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656819 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2aa1ce01-d73b-48a2-bc62-b361f8861131-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656835 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk6kc\" (UniqueName: \"kubernetes.io/projected/7a5592e9-b443-4f48-b970-8e7b52c722f0-kube-api-access-gk6kc\") pod \"multus-admission-controller-857f4d67dd-ms74j\" (UID: \"7a5592e9-b443-4f48-b970-8e7b52c722f0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656856 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq87s\" (UniqueName: \"kubernetes.io/projected/2aa1ce01-d73b-48a2-bc62-b361f8861131-kube-api-access-tq87s\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656873 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c590f65-36ab-4f95-92c7-b70cc303b448-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656889 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-249f6\" (UniqueName: \"kubernetes.io/projected/0eececdb-2bc0-45ec-88e3-7b64e2043876-kube-api-access-249f6\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656908 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656921 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-metrics-tls\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656937 4611 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/651fdc30-c679-4d23-a889-2cf5e377afb1-webhook-cert\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656959 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpxp2\" (UniqueName: \"kubernetes.io/projected/c758f379-2019-4a2c-9ee3-12030ae2f85b-kube-api-access-bpxp2\") pod \"ingress-canary-smw45\" (UID: \"c758f379-2019-4a2c-9ee3-12030ae2f85b\") " pod="openshift-ingress-canary/ingress-canary-smw45" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.656977 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/651fdc30-c679-4d23-a889-2cf5e377afb1-apiservice-cert\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657002 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/651fdc30-c679-4d23-a889-2cf5e377afb1-tmpfs\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657025 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7471b65-3a4e-461a-b10d-43c2b0715b36-serving-cert\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657041 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ed1460d-346f-4749-8a8d-107744d6b4a3-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wwvsv\" (UID: \"7ed1460d-346f-4749-8a8d-107744d6b4a3\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657059 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-config-volume\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657074 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbgt6\" (UniqueName: \"kubernetes.io/projected/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-kube-api-access-jbgt6\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657089 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/40660608-6535-4d31-b8af-564e51dae5c8-srv-cert\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657104 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-service-ca\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657125 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0842824a-309a-4133-a66d-c393cf09be7a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657141 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5755d\" (UniqueName: \"kubernetes.io/projected/651fdc30-c679-4d23-a889-2cf5e377afb1-kube-api-access-5755d\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657156 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0eececdb-2bc0-45ec-88e3-7b64e2043876-images\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657171 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0842824a-309a-4133-a66d-c393cf09be7a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657186 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7j9r\" (UniqueName: \"kubernetes.io/projected/03b1269f-c666-43fc-b39b-30a0174f3a38-kube-api-access-x7j9r\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657209 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657224 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0eececdb-2bc0-45ec-88e3-7b64e2043876-proxy-tls\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:35 crc 
kubenswrapper[4611]: I0929 12:42:35.657238 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c758f379-2019-4a2c-9ee3-12030ae2f85b-cert\") pod \"ingress-canary-smw45\" (UID: \"c758f379-2019-4a2c-9ee3-12030ae2f85b\") " pod="openshift-ingress-canary/ingress-canary-smw45" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657253 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/615f5d32-538b-4c6a-8bb9-57eedec2a126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nk4d7\" (UID: \"615f5d32-538b-4c6a-8bb9-57eedec2a126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657270 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7471b65-3a4e-461a-b10d-43c2b0715b36-config\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657289 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/be6d0117-b4cb-4cf3-8ecf-7add7e040587-certs\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657317 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-mountpoint-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657339 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-registration-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657659 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-registration-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.657827 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-socket-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.658306 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-config\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" 
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.658410 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.158394915 +0000 UTC m=+143.049914521 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.658456 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-plugins-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.660964 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c590f65-36ab-4f95-92c7-b70cc303b448-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.661741 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.664610 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.665206 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b5166b57-5645-49e8-870c-108f64403797-trusted-ca\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.665507 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-csi-data-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.666521 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0eececdb-2bc0-45ec-88e3-7b64e2043876-auth-proxy-config\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.667509 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.669422 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/651fdc30-c679-4d23-a889-2cf5e377afb1-tmpfs\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.670358 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5166b57-5645-49e8-870c-108f64403797-metrics-tls\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.672982 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/03b1269f-c666-43fc-b39b-30a0174f3a38-signing-cabundle\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.675755 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d33d2c15-5789-4553-b00c-b9c2d5f332a2-secret-volume\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.676468 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-config-volume\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.676495 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-ca\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.679073 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0842824a-309a-4133-a66d-c393cf09be7a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.679669 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-service-ca\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.680392 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0eececdb-2bc0-45ec-88e3-7b64e2043876-images\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.680711 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.681212 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7471b65-3a4e-461a-b10d-43c2b0715b36-config\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.681319 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/947d6530-c9b4-45cb-9f52-d83f26bda55a-mountpoint-dir\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.682725 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-serving-cert\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.683241 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/651fdc30-c679-4d23-a889-2cf5e377afb1-webhook-cert\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.683983 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2aa1ce01-d73b-48a2-bc62-b361f8861131-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.684186 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.684189 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/651fdc30-c679-4d23-a889-2cf5e377afb1-apiservice-cert\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.697117 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/03b1269f-c666-43fc-b39b-30a0174f3a38-signing-key\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.698089 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6"]
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.699502 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hml26"]
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.702038 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-metrics-tls\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.703501 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.706383 4611 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.706464 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls podName:14166aa7-7554-4165-9a14-f222a13d3c82 nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.706448743 +0000 UTC m=+143.597968419 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-kv8lp" (UID: "14166aa7-7554-4165-9a14-f222a13d3c82") : failed to sync secret cache: timed out waiting for the condition
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.709803 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d33d2c15-5789-4553-b00c-b9c2d5f332a2-config-volume\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.710883 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-proxy-tls\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.711918 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/40660608-6535-4d31-b8af-564e51dae5c8-profile-collector-cert\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.713119 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f7471b65-3a4e-461a-b10d-43c2b0715b36-serving-cert\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.713569 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c590f65-36ab-4f95-92c7-b70cc303b448-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.714221 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0842824a-309a-4133-a66d-c393cf09be7a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.714806 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ed1460d-346f-4749-8a8d-107744d6b4a3-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wwvsv\" (UID: \"7ed1460d-346f-4749-8a8d-107744d6b4a3\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.714884 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-etcd-client\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.715444 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/be6d0117-b4cb-4cf3-8ecf-7add7e040587-certs\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.715821 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a5592e9-b443-4f48-b970-8e7b52c722f0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-ms74j\" (UID: \"7a5592e9-b443-4f48-b970-8e7b52c722f0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.716381 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c758f379-2019-4a2c-9ee3-12030ae2f85b-cert\") pod \"ingress-canary-smw45\" (UID: \"c758f379-2019-4a2c-9ee3-12030ae2f85b\") " pod="openshift-ingress-canary/ingress-canary-smw45"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.718955 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2aa1ce01-d73b-48a2-bc62-b361f8861131-srv-cert\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.719677 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/be6d0117-b4cb-4cf3-8ecf-7add7e040587-node-bootstrap-token\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.719798 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0eececdb-2bc0-45ec-88e3-7b64e2043876-proxy-tls\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.720178 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/615f5d32-538b-4c6a-8bb9-57eedec2a126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nk4d7\" (UID: \"615f5d32-538b-4c6a-8bb9-57eedec2a126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.721098 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.722021 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/40660608-6535-4d31-b8af-564e51dae5c8-srv-cert\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.741070 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.741335 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.743821 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.760519 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clbts\" (UniqueName: \"kubernetes.io/projected/e5af436c-0542-4160-8a3b-418382623820-kube-api-access-clbts\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.760571 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.760856 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcv2b\" (UniqueName: \"kubernetes.io/projected/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-kube-api-access-kcv2b\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.760884 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm4ck\" (UniqueName: \"kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9"
Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.764167 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.264153477 +0000 UTC m=+143.155673083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.767297 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm4ck\" (UniqueName: \"kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck\") pod \"oauth-openshift-558db77b4-qght9\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " pod="openshift-authentication/oauth-openshift-558db77b4-qght9"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.768688 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clbts\" (UniqueName: \"kubernetes.io/projected/e5af436c-0542-4160-8a3b-418382623820-kube-api-access-clbts\") pod \"machine-approver-56656f9798-b9wt8\" (UID: \"e5af436c-0542-4160-8a3b-418382623820\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.770848 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcv2b\" (UniqueName: \"kubernetes.io/projected/8ed60ce8-626d-408f-8e0e-7b1e1855c47d-kube-api-access-kcv2b\") pod \"openshift-apiserver-operator-796bbdcf4f-m5xtn\" (UID: \"8ed60ce8-626d-408f-8e0e-7b1e1855c47d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.806981 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxdhb\" (UniqueName: \"kubernetes.io/projected/d1ca0ac6-c50d-41d8-964e-5c065edd7197-kube-api-access-rxdhb\") pod \"router-default-5444994796-6bg7b\" (UID: \"d1ca0ac6-c50d-41d8-964e-5c065edd7197\") " pod="openshift-ingress/router-default-5444994796-6bg7b"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.822696 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e598e87c-ea30-47db-9f40-1155b730e8d4-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-44sf4\" (UID: \"e598e87c-ea30-47db-9f40-1155b730e8d4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.841702 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmp6s\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-kube-api-access-cmp6s\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.861195 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.864233 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume
\"kube-api-access-dvrvq\" (UniqueName: \"kubernetes.io/projected/14166aa7-7554-4165-9a14-f222a13d3c82-kube-api-access-dvrvq\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.864737 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.364708995 +0000 UTC m=+143.256228601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.866329 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-bound-sa-token\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.881381 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-jjrl6"] Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.889415 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5qb5\" (UniqueName: \"kubernetes.io/projected/0842824a-309a-4133-a66d-c393cf09be7a-kube-api-access-d5qb5\") pod \"openshift-controller-manager-operator-756b6f6bc6-mf6jx\" (UID: \"0842824a-309a-4133-a66d-c393cf09be7a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.890403 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvrvq\" (UniqueName: \"kubernetes.io/projected/14166aa7-7554-4165-9a14-f222a13d3c82-kube-api-access-dvrvq\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.904862 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b5166b57-5645-49e8-870c-108f64403797-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.924341 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjtfk\" (UniqueName: \"kubernetes.io/projected/00b46678-03aa-4e70-af99-c622eb2b2508-kube-api-access-jjtfk\") pod \"marketplace-operator-79b997595-rcs29\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") " pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:35 crc kubenswrapper[4611]: W0929 12:42:35.949252 4611 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb819a41_a91e_439f_a34f_331f9731340a.slice/crio-ce48d75f174cbccc31eecda006afcdbdeb4dc768cab2d4a0b5dc113387017fa3 WatchSource:0}: Error finding container ce48d75f174cbccc31eecda006afcdbdeb4dc768cab2d4a0b5dc113387017fa3: Status 404 returned error can't find the container with id ce48d75f174cbccc31eecda006afcdbdeb4dc768cab2d4a0b5dc113387017fa3 Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.957496 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk6kc\" (UniqueName: \"kubernetes.io/projected/7a5592e9-b443-4f48-b970-8e7b52c722f0-kube-api-access-gk6kc\") pod \"multus-admission-controller-857f4d67dd-ms74j\" (UID: \"7a5592e9-b443-4f48-b970-8e7b52c722f0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.965392 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:35 crc kubenswrapper[4611]: E0929 12:42:35.965702 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.465692144 +0000 UTC m=+143.357211750 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.970166 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.976973 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq87s\" (UniqueName: \"kubernetes.io/projected/2aa1ce01-d73b-48a2-bc62-b361f8861131-kube-api-access-tq87s\") pod \"olm-operator-6b444d44fb-9trd8\" (UID: \"2aa1ce01-d73b-48a2-bc62-b361f8861131\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.977535 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.979830 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl887\" (UniqueName: \"kubernetes.io/projected/4c590f65-36ab-4f95-92c7-b70cc303b448-kube-api-access-rl887\") pod \"kube-storage-version-migrator-operator-b67b599dd-r7brt\" (UID: \"4c590f65-36ab-4f95-92c7-b70cc303b448\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.997133 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" Sep 29 12:42:35 crc kubenswrapper[4611]: I0929 12:42:35.997900 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-249f6\" (UniqueName: \"kubernetes.io/projected/0eececdb-2bc0-45ec-88e3-7b64e2043876-kube-api-access-249f6\") pod \"machine-config-operator-74547568cd-59sbq\" (UID: \"0eececdb-2bc0-45ec-88e3-7b64e2043876\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.009968 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.019248 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.035257 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.044800 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.057548 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwlgb\" (UniqueName: \"kubernetes.io/projected/947d6530-c9b4-45cb-9f52-d83f26bda55a-kube-api-access-wwlgb\") pod \"csi-hostpathplugin-vbvkv\" (UID: \"947d6530-c9b4-45cb-9f52-d83f26bda55a\") " pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.059681 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.066795 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.067135 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.567110366 +0000 UTC m=+143.458629972 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.067283 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.067640 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.567607979 +0000 UTC m=+143.459127585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.074487 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.076614 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.086485 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.088480 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27knm\" (UniqueName: \"kubernetes.io/projected/b5166b57-5645-49e8-870c-108f64403797-kube-api-access-27knm\") pod \"ingress-operator-5b745b69d9-k9jp6\" (UID: \"b5166b57-5645-49e8-870c-108f64403797\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.100348 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.117318 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69kp7\" (UniqueName: \"kubernetes.io/projected/8cc8c201-cfc2-4b2f-a69b-daf3f79ba847-kube-api-access-69kp7\") pod \"machine-config-controller-84d6567774-5ldbq\" (UID: \"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.125484 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.132517 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.135433 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.140657 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc9ck\" (UniqueName: \"kubernetes.io/projected/f7471b65-3a4e-461a-b10d-43c2b0715b36-kube-api-access-qc9ck\") pod \"service-ca-operator-777779d784-wc9wj\" (UID: \"f7471b65-3a4e-461a-b10d-43c2b0715b36\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.146310 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.159050 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8hq4m"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.160315 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm8kr\" (UniqueName: \"kubernetes.io/projected/40660608-6535-4d31-b8af-564e51dae5c8-kube-api-access-mm8kr\") pod \"catalog-operator-68c6474976-wg2qr\" (UID: \"40660608-6535-4d31-b8af-564e51dae5c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.165102 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkcfc\" (UniqueName: \"kubernetes.io/projected/453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d-kube-api-access-gkcfc\") pod \"dns-default-2fgwq\" (UID: \"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d\") " pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.165471 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpxp2\" (UniqueName: \"kubernetes.io/projected/c758f379-2019-4a2c-9ee3-12030ae2f85b-kube-api-access-bpxp2\") pod \"ingress-canary-smw45\" (UID: \"c758f379-2019-4a2c-9ee3-12030ae2f85b\") " pod="openshift-ingress-canary/ingress-canary-smw45" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.167258 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhm78\" (UniqueName: \"kubernetes.io/projected/615f5d32-538b-4c6a-8bb9-57eedec2a126-kube-api-access-vhm78\") pod \"package-server-manager-789f6589d5-nk4d7\" (UID: \"615f5d32-538b-4c6a-8bb9-57eedec2a126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.169488 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.173280 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.173526 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.673508835 +0000 UTC m=+143.565028441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.188134 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sph75\" (UniqueName: \"kubernetes.io/projected/501f1b34-13aa-4b8c-8fac-d525ad0626b7-kube-api-access-sph75\") pod \"migrator-59844c95c7-lxmhx\" (UID: \"501f1b34-13aa-4b8c-8fac-d525ad0626b7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" Sep 29 12:42:36 crc kubenswrapper[4611]: W0929 12:42:36.193730 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod232ad623_50fd_4e92_ac89_cb548fbf140b.slice/crio-fb04d6ae10ffc25847fbf8c64e49b313afb9d13261909069adb3f09918f07b75 WatchSource:0}: Error finding container fb04d6ae10ffc25847fbf8c64e49b313afb9d13261909069adb3f09918f07b75: Status 404 returned error can't find the container with id fb04d6ae10ffc25847fbf8c64e49b313afb9d13261909069adb3f09918f07b75 Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.196164 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" Sep 29 12:42:36 crc kubenswrapper[4611]: W0929 12:42:36.196803 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad4b3e69_ec86_4fba_bd07_af9f1532fed1.slice/crio-923330582b20c061ce74c4e60a45d556c7d720aa0e02ca6912302555beed5f62 WatchSource:0}: Error finding container 923330582b20c061ce74c4e60a45d556c7d720aa0e02ca6912302555beed5f62: Status 404 returned error can't find the container with id 923330582b20c061ce74c4e60a45d556c7d720aa0e02ca6912302555beed5f62 Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.200967 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44vdd\" (UniqueName: \"kubernetes.io/projected/be6d0117-b4cb-4cf3-8ecf-7add7e040587-kube-api-access-44vdd\") pod \"machine-config-server-7lv98\" (UID: \"be6d0117-b4cb-4cf3-8ecf-7add7e040587\") " pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.203081 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-smw45" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.219412 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7j9r\" (UniqueName: \"kubernetes.io/projected/03b1269f-c666-43fc-b39b-30a0174f3a38-kube-api-access-x7j9r\") pod \"service-ca-9c57cc56f-h94vp\" (UID: \"03b1269f-c666-43fc-b39b-30a0174f3a38\") " pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.255083 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdss2\" (UniqueName: \"kubernetes.io/projected/7ed1460d-346f-4749-8a8d-107744d6b4a3-kube-api-access-xdss2\") pod \"control-plane-machine-set-operator-78cbb6b69f-wwvsv\" (UID: \"7ed1460d-346f-4749-8a8d-107744d6b4a3\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.258957 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbgt6\" (UniqueName: \"kubernetes.io/projected/2ec0a709-b839-4a69-a6fd-2aaa11fdf388-kube-api-access-jbgt6\") pod \"etcd-operator-b45778765-jmp7r\" (UID: \"2ec0a709-b839-4a69-a6fd-2aaa11fdf388\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.271146 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.271425 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.771415289 +0000 UTC m=+143.662934895 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.292759 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5755d\" (UniqueName: \"kubernetes.io/projected/651fdc30-c679-4d23-a889-2cf5e377afb1-kube-api-access-5755d\") pod \"packageserver-d55dfcdfc-qkkfv\" (UID: \"651fdc30-c679-4d23-a889-2cf5e377afb1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.331799 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbw9s\" (UniqueName: \"kubernetes.io/projected/d33d2c15-5789-4553-b00c-b9c2d5f332a2-kube-api-access-jbw9s\") pod \"collect-profiles-29319150-w8dd5\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.339055 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.350790 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.360028 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.369959 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.374278 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.374979 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.874940819 +0000 UTC m=+143.766460425 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.392488 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-jjrl6" event={"ID":"fb819a41-a91e-439f-a34f-331f9731340a","Type":"ContainerStarted","Data":"ce48d75f174cbccc31eecda006afcdbdeb4dc768cab2d4a0b5dc113387017fa3"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.394009 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.397496 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" event={"ID":"4348f944-d51c-4fdc-8789-646958d61216","Type":"ContainerStarted","Data":"a7a642f434a8107a4dc4b9925abe810351bcf2189f691d38dd599ed8b7b00729"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.403966 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-c7v9z" event={"ID":"c1afa714-1c99-43a8-ba3f-96af7f49abd5","Type":"ContainerStarted","Data":"bad3e569cd6cccfcd9818e3b6662db0cd1ee39062a6fa4d0b4629a8913f2a76a"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.404891 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" event={"ID":"0732fc85-8bf5-473f-ba34-50f375d7332d","Type":"ContainerStarted","Data":"838c336c27e00a66e371d4e58916e4d141cd6a817516f0abc80131f87cc41c2e"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.407103 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.407406 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" event={"ID":"a2434a6e-935b-4dde-ad5c-cbb8364c7034","Type":"ContainerStarted","Data":"974ec671ff8f786d99511c003537f0b9829b44ddb86536f4859c63e29fdee301"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.411817 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" event={"ID":"13efc77e-d80b-4d27-adc7-e93303568154","Type":"ContainerStarted","Data":"429637ab3adb6df09c3bbe4e79f1078628938bf0ff2efe379062673f2de2fe29"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.413392 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" event={"ID":"49949be7-0f3b-46cd-b895-079221aa632d","Type":"ContainerStarted","Data":"28e806fbf5fa6f0bb15f234f8dfa58bc7bd0d3c92bccc14986fefcd3c8756392"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.413415 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" event={"ID":"49949be7-0f3b-46cd-b895-079221aa632d","Type":"ContainerStarted","Data":"7d85b33435bb260ecc0afe916a580270ec3fdf0ac385f2afc8fa3377e5cb25eb"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.414315 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.414713 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" event={"ID":"cf70983e-cbf7-4c9d-ab94-2aceade70418","Type":"ContainerStarted","Data":"135f6bd6a3240f8bab1fee9f54db33bb9a886e58461f92eb4a6c1630d65099d0"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.414734 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" event={"ID":"cf70983e-cbf7-4c9d-ab94-2aceade70418","Type":"ContainerStarted","Data":"4b6c118c036bf34fd8516b373e65015b0d5657f207ac515860c8493751780316"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.416230 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" event={"ID":"232ad623-50fd-4e92-ac89-cb548fbf140b","Type":"ContainerStarted","Data":"fb04d6ae10ffc25847fbf8c64e49b313afb9d13261909069adb3f09918f07b75"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.418565 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.423553 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" event={"ID":"7d3dc824-e247-4a32-8870-95ee23ca6241","Type":"ContainerStarted","Data":"be150d6ad9ca34da66f6b308233a377d8a0f3ff994b29140077884ec0f42ce78"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.425243 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" event={"ID":"ad4b3e69-ec86-4fba-bd07-af9f1532fed1","Type":"ContainerStarted","Data":"923330582b20c061ce74c4e60a45d556c7d720aa0e02ca6912302555beed5f62"} Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.426432 4611 patch_prober.go:28] interesting pod/console-operator-58897d9998-p68fp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/readyz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.426468 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-p68fp" podUID="85b3a01b-0801-431c-aa43-2a0170aeb76f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.5:8443/readyz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.439519 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.452838 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.460820 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.477794 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7lv98" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.478962 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.479318 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:36.979304612 +0000 UTC m=+143.870824218 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.581565 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.581867 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.081842413 +0000 UTC m=+143.973362019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.582071 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.583571 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.083563801 +0000 UTC m=+143.975083407 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.602328 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.660067 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-ms74j"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.683871 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.684170 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.18415633 +0000 UTC m=+144.075675936 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: W0929 12:42:36.688074 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c590f65_36ab_4f95_92c7_b70cc303b448.slice/crio-196250991eea2c5eb370d0ee546ea9c9f6669174608b00db9a8c0edc084c546f WatchSource:0}: Error finding container 196250991eea2c5eb370d0ee546ea9c9f6669174608b00db9a8c0edc084c546f: Status 404 returned error can't find the container with id 196250991eea2c5eb370d0ee546ea9c9f6669174608b00db9a8c0edc084c546f Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.787220 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.787705 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:36 crc kubenswrapper[4611]: 
E0929 12:42:36.788205 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.288184253 +0000 UTC m=+144.179703929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.798315 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/14166aa7-7554-4165-9a14-f222a13d3c82-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kv8lp\" (UID: \"14166aa7-7554-4165-9a14-f222a13d3c82\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:36 crc kubenswrapper[4611]: W0929 12:42:36.809459 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ed60ce8_626d_408f_8e0e_7b1e1855c47d.slice/crio-dae1461a1580fa63adf7c4c916bb4031d27ea6f1479fd16e1d13e03949e85840 WatchSource:0}: Error finding container dae1461a1580fa63adf7c4c916bb4031d27ea6f1479fd16e1d13e03949e85840: Status 404 returned error can't find the container with id dae1461a1580fa63adf7c4c916bb4031d27ea6f1479fd16e1d13e03949e85840 Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.889093 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.889882 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.389863732 +0000 UTC m=+144.281383338 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.898032 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vbvkv"] Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.939234 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" Sep 29 12:42:36 crc kubenswrapper[4611]: I0929 12:42:36.996808 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:36 crc kubenswrapper[4611]: E0929 12:42:36.997151 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.497138775 +0000 UTC m=+144.388658381 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.017289 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-2fgwq"] Sep 29 12:42:37 crc kubenswrapper[4611]: W0929 12:42:37.050559 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5af436c_0542_4160_8a3b_418382623820.slice/crio-3b253c42e403ac6a75d3d520b26dd41271609a637e21d02b05971d993d854834 WatchSource:0}: Error finding container 3b253c42e403ac6a75d3d520b26dd41271609a637e21d02b05971d993d854834: Status 404 returned error can't find the container with id 3b253c42e403ac6a75d3d520b26dd41271609a637e21d02b05971d993d854834 Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.099706 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.099832 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.599814302 +0000 UTC m=+144.491333908 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.099903 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.100242 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.600234763 +0000 UTC m=+144.491754369 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.227409 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.227933 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.72791828 +0000 UTC m=+144.619437886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.323038 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8"]
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.335052 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.335615 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.835600415 +0000 UTC m=+144.727120021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.386931 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-p68fp" podStartSLOduration=120.386912172 podStartE2EDuration="2m0.386912172s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:37.385826542 +0000 UTC m=+144.277346158" watchObservedRunningTime="2025-09-29 12:42:37.386912172 +0000 UTC m=+144.278431788"
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.429833 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq"]
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.442101 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.442480 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:37.942464727 +0000 UTC m=+144.833984333 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.499105 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" event={"ID":"4c590f65-36ab-4f95-92c7-b70cc303b448","Type":"ContainerStarted","Data":"196250991eea2c5eb370d0ee546ea9c9f6669174608b00db9a8c0edc084c546f"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.514842 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" event={"ID":"e5af436c-0542-4160-8a3b-418382623820","Type":"ContainerStarted","Data":"3b253c42e403ac6a75d3d520b26dd41271609a637e21d02b05971d993d854834"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.523080 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6bg7b" event={"ID":"d1ca0ac6-c50d-41d8-964e-5c065edd7197","Type":"ContainerStarted","Data":"54e7e36e030efa2ff8c5b8f8dcb4eb7147bf78425fddff5d33587ed9833048c9"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.530016 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-c7v9z" event={"ID":"c1afa714-1c99-43a8-ba3f-96af7f49abd5","Type":"ContainerStarted","Data":"e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.538222 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" event={"ID":"0732fc85-8bf5-473f-ba34-50f375d7332d","Type":"ContainerStarted","Data":"8b59f35b3ccfae6f81b6f4504eda77102ef8fcd1c367986ea36d2b20c8484ee7"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.544128 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.549896 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.049877594 +0000 UTC m=+144.941397200 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.550197 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" event={"ID":"8ed60ce8-626d-408f-8e0e-7b1e1855c47d","Type":"ContainerStarted","Data":"dae1461a1580fa63adf7c4c916bb4031d27ea6f1479fd16e1d13e03949e85840"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.552895 4611 generic.go:334] "Generic (PLEG): container finished" podID="cf70983e-cbf7-4c9d-ab94-2aceade70418" containerID="135f6bd6a3240f8bab1fee9f54db33bb9a886e58461f92eb4a6c1630d65099d0" exitCode=0
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.552954 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" event={"ID":"cf70983e-cbf7-4c9d-ab94-2aceade70418","Type":"ContainerDied","Data":"135f6bd6a3240f8bab1fee9f54db33bb9a886e58461f92eb4a6c1630d65099d0"}
Sep 29 12:42:37 crc kubenswrapper[4611]: W0929 12:42:37.610813 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2aa1ce01_d73b_48a2_bc62_b361f8861131.slice/crio-8f6928456dd1e51d3e7cecac4ab3b69350b10ebb330354071d33c871d5bf9ff8 WatchSource:0}: Error finding container 8f6928456dd1e51d3e7cecac4ab3b69350b10ebb330354071d33c871d5bf9ff8: Status 404 returned error can't find the container with id 8f6928456dd1e51d3e7cecac4ab3b69350b10ebb330354071d33c871d5bf9ff8
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.611051 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-jjrl6" event={"ID":"fb819a41-a91e-439f-a34f-331f9731340a","Type":"ContainerStarted","Data":"ecf150e7ae6d6ff45859aef912a9e2625e2ae6e53669721d0aa333f7031a7301"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.611867 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-jjrl6"
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.634235 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.634330 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2fgwq" event={"ID":"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d","Type":"ContainerStarted","Data":"feac494bd93913052b29e632e625653c52ee2acb63b1e8b1a868288b5c987cb7"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.634374 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.651300 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.651771 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.151556383 +0000 UTC m=+145.043075989 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.652366 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.658225 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.158169366 +0000 UTC m=+145.049689052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.665969 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rcs29"]
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.693891 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" event={"ID":"ca3bb61b-97fa-4e78-b84c-66b82267072d","Type":"ContainerStarted","Data":"d92c1f6d4010c49838dea8d0aaeee1e9bd97d97c0681cc3eda57aa7de09c50ba"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.701999 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq"]
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.707469 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" event={"ID":"4348f944-d51c-4fdc-8789-646958d61216","Type":"ContainerStarted","Data":"ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.708464 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26"
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.723857 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgnqs" podStartSLOduration=119.723837619 podStartE2EDuration="1m59.723837619s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:37.721867025 +0000 UTC m=+144.613386641" watchObservedRunningTime="2025-09-29 12:42:37.723837619 +0000 UTC m=+144.615357225"
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.728351 4611 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-hml26 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.728410 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" podUID="4348f944-d51c-4fdc-8789-646958d61216" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.754419 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.754950 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.254927968 +0000 UTC m=+145.146447574 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.773286 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" event={"ID":"7a5592e9-b443-4f48-b970-8e7b52c722f0","Type":"ContainerStarted","Data":"825b28e87e635a4bf826a5304b64de3b8f54961767319c9c620e6115254f99f3"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.784524 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" event={"ID":"947d6530-c9b4-45cb-9f52-d83f26bda55a","Type":"ContainerStarted","Data":"2e44765acd41520d04d6ae2503d3c4637496bfcc1232a4a5eb1f4dfdcc34241f"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.784582 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" event={"ID":"13efc77e-d80b-4d27-adc7-e93303568154","Type":"ContainerStarted","Data":"c8fcb602798d115ed1d60ccbbfbc3f492a8a4702b0122074307f1a8938fb38ef"}
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.810100 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qght9"]
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.857595 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.857876 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.357862612 +0000 UTC m=+145.249382208 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: W0929 12:42:37.868430 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00b46678_03aa_4e70_af99_c622eb2b2508.slice/crio-76dc0f0c553516333867ee66bab427164fd244cce4d3b37a78e4fc7347e9a206 WatchSource:0}: Error finding container 76dc0f0c553516333867ee66bab427164fd244cce4d3b37a78e4fc7347e9a206: Status 404 returned error can't find the container with id 76dc0f0c553516333867ee66bab427164fd244cce4d3b37a78e4fc7347e9a206
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.932880 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-smw45"]
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.958370 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:37 crc kubenswrapper[4611]: E0929 12:42:37.960193 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.460174478 +0000 UTC m=+145.351694084 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:37 crc kubenswrapper[4611]: I0929 12:42:37.964459 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.034825 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" podStartSLOduration=120.03480738 podStartE2EDuration="2m0.03480738s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:37.99969245 +0000 UTC m=+144.891212056" watchObservedRunningTime="2025-09-29 12:42:38.03480738 +0000 UTC m=+144.926326986"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.035907 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.068449 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.069011 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.568994164 +0000 UTC m=+145.460513770 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.076711 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.082613 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.161894 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.169381 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.169717 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.669702026 +0000 UTC m=+145.561221632 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.174359 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.180743 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.262578 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.283862 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.284224 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.784210859 +0000 UTC m=+145.675730465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.352188 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-c7v9z" podStartSLOduration=121.352171006 podStartE2EDuration="2m1.352171006s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:38.34794206 +0000 UTC m=+145.239461666" watchObservedRunningTime="2025-09-29 12:42:38.352171006 +0000 UTC m=+145.243690612"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.352890 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hffln" podStartSLOduration=120.352885206 podStartE2EDuration="2m0.352885206s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:38.282827861 +0000 UTC m=+145.174347477" watchObservedRunningTime="2025-09-29 12:42:38.352885206 +0000 UTC m=+145.244404802"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.388772 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.389184 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.889169189 +0000 UTC m=+145.780688795 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.412568 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-4fxjt" podStartSLOduration=121.412554574 podStartE2EDuration="2m1.412554574s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:38.411916607 +0000 UTC m=+145.303436213" watchObservedRunningTime="2025-09-29 12:42:38.412554574 +0000 UTC m=+145.304074180"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.439717 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-jjrl6" podStartSLOduration=120.439691794 podStartE2EDuration="2m0.439691794s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:38.437176115 +0000 UTC m=+145.328695741" watchObservedRunningTime="2025-09-29 12:42:38.439691794 +0000 UTC m=+145.331211400"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.490538 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.490871 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:38.990859578 +0000 UTC m=+145.882379184 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.525771 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" podStartSLOduration=120.525756052 podStartE2EDuration="2m0.525756052s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:38.478219258 +0000 UTC m=+145.369738874" watchObservedRunningTime="2025-09-29 12:42:38.525756052 +0000 UTC m=+145.417275658"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.526481 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jmp7r"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.595859 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.596462 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.096441214 +0000 UTC m=+145.987960820 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.602229 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.624312 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr"]
Sep 29 12:42:38 crc kubenswrapper[4611]: W0929 12:42:38.680140 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd33d2c15_5789_4553_b00c_b9c2d5f332a2.slice/crio-66949a20cb106a3e2134526d7eedff1d50fb87af715ba52e1c7303a5868da957 WatchSource:0}: Error finding container 66949a20cb106a3e2134526d7eedff1d50fb87af715ba52e1c7303a5868da957: Status 404 returned error can't find the container with id 66949a20cb106a3e2134526d7eedff1d50fb87af715ba52e1c7303a5868da957
Sep 29 12:42:38 crc kubenswrapper[4611]: W0929 12:42:38.691374 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40660608_6535_4d31_b8af_564e51dae5c8.slice/crio-64697dc077555cf45f61663e7ace872e3e61fcf7c56d11d7e28e7f16ab99423e WatchSource:0}: Error finding container 64697dc077555cf45f61663e7ace872e3e61fcf7c56d11d7e28e7f16ab99423e: Status 404 returned error can't find the container with id 64697dc077555cf45f61663e7ace872e3e61fcf7c56d11d7e28e7f16ab99423e
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.697448 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.698060 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.198044861 +0000 UTC m=+146.089564467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.704179 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-h94vp"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.798470 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.798579 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.298556007 +0000 UTC m=+146.190075613 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.798883 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.805323 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.305301914 +0000 UTC m=+146.196821520 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.806518 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" event={"ID":"651fdc30-c679-4d23-a889-2cf5e377afb1","Type":"ContainerStarted","Data":"748c77721ca5a191247a0837020ebd0de7eaefe0562f520273673eeb5183031a"}
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.867380 4611 generic.go:334] "Generic (PLEG): container finished" podID="a2434a6e-935b-4dde-ad5c-cbb8364c7034" containerID="87c88b40f78ba54c7e7127e80aacffef55523daf8c90a46ed39bef27e505711f" exitCode=0
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.867484 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" event={"ID":"a2434a6e-935b-4dde-ad5c-cbb8364c7034","Type":"ContainerDied","Data":"87c88b40f78ba54c7e7127e80aacffef55523daf8c90a46ed39bef27e505711f"}
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.885645 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" event={"ID":"501f1b34-13aa-4b8c-8fac-d525ad0626b7","Type":"ContainerStarted","Data":"605ae6bba42353349d4880569ec54cd26f215cda7fc1a282818b27847e431ac8"}
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.903205 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" event={"ID":"03b1269f-c666-43fc-b39b-30a0174f3a38","Type":"ContainerStarted","Data":"8b0e0d79ab6347ae07a2f92c7f283b7b333dfd0b3d4478787b2bdd1216065c2f"}
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.903587 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.903669 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.40365351 +0000 UTC m=+146.295173116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.904115 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:38 crc kubenswrapper[4611]: E0929 12:42:38.905514 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.405496921 +0000 UTC m=+146.297016567 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.921389 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" event={"ID":"f7471b65-3a4e-461a-b10d-43c2b0715b36","Type":"ContainerStarted","Data":"a6f7d49007785fa42fd51f2955a41b7666820488be7a30253131df4b9032b3de"}
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.953175 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-kv8lp"]
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.971721 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" event={"ID":"8ed60ce8-626d-408f-8e0e-7b1e1855c47d","Type":"ContainerStarted","Data":"f3eff6b16f0f15dfbea0fad0bdee54e9ae295d5079b9168c8251865e8fca9e29"}
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.987734 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m5xtn" podStartSLOduration=121.987713352 podStartE2EDuration="2m1.987713352s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:38.98617217 +0000 UTC m=+145.877691796" watchObservedRunningTime="2025-09-29 12:42:38.987713352 +0000 UTC m=+145.879232958"
Sep 29 12:42:38 crc kubenswrapper[4611]: I0929 12:42:38.995882 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" event={"ID":"2ec0a709-b839-4a69-a6fd-2aaa11fdf388","Type":"ContainerStarted","Data":"809adabc8aec765ba94214c5e2afd234799dc522f9ceb5bba2f18f05b232f063"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.005000 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.006408 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.506392358 +0000 UTC m=+146.397911964 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.006936 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" event={"ID":"e598e87c-ea30-47db-9f40-1155b730e8d4","Type":"ContainerStarted","Data":"5b6ac4372d2150fd6db8e0a1bfadf7b12751d2aeec2d06c2d84957e767fbb7ab"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.047785 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" event={"ID":"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847","Type":"ContainerStarted","Data":"806167aa2359795ad1432b2f594fe50ffe2a63148013290b4733e94098b5da02"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.060481 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" event={"ID":"00b46678-03aa-4e70-af99-c622eb2b2508","Type":"ContainerStarted","Data":"76dc0f0c553516333867ee66bab427164fd244cce4d3b37a78e4fc7347e9a206"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.084896 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" event={"ID":"615f5d32-538b-4c6a-8bb9-57eedec2a126","Type":"ContainerStarted","Data":"0263eaf7024d91abe106f5478f68b19adeb4f9f0c8413fc0ba4db3591f257d6c"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.089271 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" event={"ID":"b5166b57-5645-49e8-870c-108f64403797","Type":"ContainerStarted","Data":"0497fe4148f9695742e8a752790d72cdd7b947bab85fd01706f57547722e0bda"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.093423 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" event={"ID":"d33d2c15-5789-4553-b00c-b9c2d5f332a2","Type":"ContainerStarted","Data":"66949a20cb106a3e2134526d7eedff1d50fb87af715ba52e1c7303a5868da957"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.095702 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" event={"ID":"0eececdb-2bc0-45ec-88e3-7b64e2043876","Type":"ContainerStarted","Data":"ad03716965e0fcfe5a0f34708678b31b7cbe4bb6fec5a56849e93f031168f42a"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.110583 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.111114 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.611100751 +0000 UTC m=+146.502620357 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.116028 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" event={"ID":"7ed1460d-346f-4749-8a8d-107744d6b4a3","Type":"ContainerStarted","Data":"2c69e848321c687484a091317c1356e2539a8ba40ffec8b4cdad9b65d6d78379"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.128310 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" event={"ID":"ad4b3e69-ec86-4fba-bd07-af9f1532fed1","Type":"ContainerStarted","Data":"21e6daef409af1a986dc44fc7290c23ce0f35f49f1e2d3d51d5cb3bd7168a6e1"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.136979 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" event={"ID":"232ad623-50fd-4e92-ac89-cb548fbf140b","Type":"ContainerStarted","Data":"a9fa766b82fa0b338c6c594da8bcbdecd274ceff2f1e548d8fda738b433462c8"}
Sep 29 12:42:39 crc kubenswrapper[4611]: W0929 12:42:39.141290 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14166aa7_7554_4165_9a14_f222a13d3c82.slice/crio-8035686dcc2b281da2c112273b425967d3c536015d50bc5b6afa2b56fe796df9 WatchSource:0}: Error finding container 8035686dcc2b281da2c112273b425967d3c536015d50bc5b6afa2b56fe796df9: Status 404 returned error can't find the container with id 8035686dcc2b281da2c112273b425967d3c536015d50bc5b6afa2b56fe796df9
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.205161 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7lv98" event={"ID":"be6d0117-b4cb-4cf3-8ecf-7add7e040587","Type":"ContainerStarted","Data":"918e2daf5064efabec0c5286bf582e2adcb307b1ab792d7aed9a4ae3774fc96f"}
Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.211725 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.212113 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.712095941 +0000 UTC m=+146.603615547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.285964 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6bg7b" event={"ID":"d1ca0ac6-c50d-41d8-964e-5c065edd7197","Type":"ContainerStarted","Data":"1158abda3470be536056ed1c119ec9304708c31e463074a995245bcaba42430d"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.315928 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" event={"ID":"2aa1ce01-d73b-48a2-bc62-b361f8861131","Type":"ContainerStarted","Data":"26edfe72665f2f1ab5a2ff77eea01d7e3b676342c522b8fb021aac69e029586e"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.316051 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" event={"ID":"2aa1ce01-d73b-48a2-bc62-b361f8861131","Type":"ContainerStarted","Data":"8f6928456dd1e51d3e7cecac4ab3b69350b10ebb330354071d33c871d5bf9ff8"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.316946 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.317314 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-46fsb" podStartSLOduration=121.317295307 podStartE2EDuration="2m1.317295307s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:39.183859631 +0000 UTC m=+146.075379237" watchObservedRunningTime="2025-09-29 12:42:39.317295307 +0000 UTC m=+146.208814913" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.317439 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.318080 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.818067578 +0000 UTC m=+146.709587184 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.324444 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" podStartSLOduration=122.324427014 podStartE2EDuration="2m2.324427014s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:39.317179044 +0000 UTC m=+146.208698670" watchObservedRunningTime="2025-09-29 12:42:39.324427014 +0000 UTC m=+146.215946620" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.331909 4611 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9trd8 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.331962 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" podUID="2aa1ce01-d73b-48a2-bc62-b361f8861131" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.368311 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-smw45" event={"ID":"c758f379-2019-4a2c-9ee3-12030ae2f85b","Type":"ContainerStarted","Data":"2a2d26fd11c2705cb4ca40b1658103e2f8f78de584dd2cd1f754f8ded4fcc622"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.376373 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" event={"ID":"f5f3de88-84fb-42b9-953f-74f002a8af28","Type":"ContainerStarted","Data":"c77410c5869733d44abfc79f9f01ac0b23d948c3fe47ee9683d6701f81b704a6"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.383585 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" event={"ID":"4c590f65-36ab-4f95-92c7-b70cc303b448","Type":"ContainerStarted","Data":"b4d60bf47ab0f2d2dca16d2828e6a4a2bedffefa0ed8f41b5f1077188817d4c7"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.408897 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" event={"ID":"40660608-6535-4d31-b8af-564e51dae5c8","Type":"ContainerStarted","Data":"64697dc077555cf45f61663e7ace872e3e61fcf7c56d11d7e28e7f16ab99423e"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.411354 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" 
event={"ID":"0842824a-309a-4133-a66d-c393cf09be7a","Type":"ContainerStarted","Data":"9962b8fb6f1a17fd3b564b1e4529b61b7b6622a0b9b47ad4664e22ff71fa4656"} Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.411419 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.411444 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.418188 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.419128 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-6bg7b" podStartSLOduration=121.419109579 podStartE2EDuration="2m1.419109579s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:39.362759183 +0000 UTC m=+146.254278789" watchObservedRunningTime="2025-09-29 12:42:39.419109579 +0000 UTC m=+146.310629175" Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.419927 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:39.919909951 +0000 UTC m=+146.811429557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.438577 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.485727 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" podStartSLOduration=121.485704499 podStartE2EDuration="2m1.485704499s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:39.418919654 +0000 UTC m=+146.310439270" watchObservedRunningTime="2025-09-29 12:42:39.485704499 +0000 UTC m=+146.377224105" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.486910 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-smw45" podStartSLOduration=6.486903882 podStartE2EDuration="6.486903882s" podCreationTimestamp="2025-09-29 12:42:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:39.47996164 +0000 UTC m=+146.371481246" watchObservedRunningTime="2025-09-29 12:42:39.486903882 +0000 UTC m=+146.378423488" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.519329 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.519873 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.019861082 +0000 UTC m=+146.911380688 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.529377 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r7brt" podStartSLOduration=121.529358125 podStartE2EDuration="2m1.529358125s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:39.525879399 +0000 UTC m=+146.417399005" watchObservedRunningTime="2025-09-29 12:42:39.529358125 +0000 UTC m=+146.420877731" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.620704 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.621243 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.121215522 +0000 UTC m=+147.012735128 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.725300 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.725603 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.225592215 +0000 UTC m=+147.117111821 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.826130 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.826446 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.326419501 +0000 UTC m=+147.217939107 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.937576 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:39 crc kubenswrapper[4611]: E0929 12:42:39.938095 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.438078565 +0000 UTC m=+147.329598161 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.980489 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.980873 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Sep 29 12:42:39 crc kubenswrapper[4611]: I0929 12:42:39.980926 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.038897 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.039244 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.539226689 +0000 UTC m=+147.430746295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.140316 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.140667 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.64065141 +0000 UTC m=+147.532171016 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.243267 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.243467 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.743436289 +0000 UTC m=+147.634955895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.243750 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.244102 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.744093657 +0000 UTC m=+147.635613263 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.344399 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.344840 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.84482203 +0000 UTC m=+147.736341636 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.451866 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.452279 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:40.952263878 +0000 UTC m=+147.843783484 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.492528 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2fct6" event={"ID":"13efc77e-d80b-4d27-adc7-e93303568154","Type":"ContainerStarted","Data":"c7a6e4387dbd00479a0850781b21ac6debc408299418c559f6f7c4731bc9349a"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.506044 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" event={"ID":"0eececdb-2bc0-45ec-88e3-7b64e2043876","Type":"ContainerStarted","Data":"bc135efa3fdd4a64f56f0c70150dc723d2f2e3900a7de845c3c4eb89e8db1321"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.532358 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" event={"ID":"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847","Type":"ContainerStarted","Data":"11fdac83c439f342dca4aa73813515633a4a779d5cd563861b52fd5ed9ca95f0"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.532505 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" event={"ID":"8cc8c201-cfc2-4b2f-a69b-daf3f79ba847","Type":"ContainerStarted","Data":"2d1cff044640eef2b059a1ff39b952eca210e394795b668af5a47a5c59eb612a"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.534770 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" event={"ID":"501f1b34-13aa-4b8c-8fac-d525ad0626b7","Type":"ContainerStarted","Data":"d7d085db834b78ea782bc3d523ce37330c978583832ce73543e53469d3307ff3"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.553142 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.553871 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.053845194 +0000 UTC m=+147.945364800 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.590796 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" event={"ID":"ad4b3e69-ec86-4fba-bd07-af9f1532fed1","Type":"ContainerStarted","Data":"32a3071c6088e3bcbbad278adec8bcc1bcf8f50a7f88c5c9d09efbd5d2547e98"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.618278 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ldbq" podStartSLOduration=122.618261003 podStartE2EDuration="2m2.618261003s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:40.617869932 +0000 UTC m=+147.509389528" watchObservedRunningTime="2025-09-29 12:42:40.618261003 +0000 UTC m=+147.509780609" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.620565 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-smw45" event={"ID":"c758f379-2019-4a2c-9ee3-12030ae2f85b","Type":"ContainerStarted","Data":"c1edd311f320510f89bb62c2a550ef34051b2994f178abc43626d5b3358a1c7d"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.641467 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" event={"ID":"7ed1460d-346f-4749-8a8d-107744d6b4a3","Type":"ContainerStarted","Data":"ffcfc055fcf9ae80610581df88c3f5ca13ca14355774784312722be5289bacef"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.648988 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" event={"ID":"e598e87c-ea30-47db-9f40-1155b730e8d4","Type":"ContainerStarted","Data":"3d20936b7d76fb39bf51de996945bdefd0cb94cb88945834fa6bf98ff0322f9d"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.658959 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.659011 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.659053 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.660369 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.160346136 +0000 UTC m=+148.051865842 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.661789 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" event={"ID":"cf70983e-cbf7-4c9d-ab94-2aceade70418","Type":"ContainerStarted","Data":"80683a01a8ac8cd5be95a5bb89d1fea5c579a7597ac4ff9397882d5f245ceafa"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.662618 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.665469 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.685701 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" event={"ID":"651fdc30-c679-4d23-a889-2cf5e377afb1","Type":"ContainerStarted","Data":"692b917199903f048ada8b0052d657579663e301a255c707ccc10e1a632695b9"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.686853 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.688775 4611 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qkkfv container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.688822 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" podUID="651fdc30-c679-4d23-a889-2cf5e377afb1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.695374 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.739750 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8hq4m" podStartSLOduration=122.739726038 podStartE2EDuration="2m2.739726038s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:40.734291608 +0000 UTC m=+147.625811214" watchObservedRunningTime="2025-09-29 12:42:40.739726038 +0000 UTC m=+147.631245644" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.759575 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.759779 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.759832 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.259818823 +0000 UTC m=+148.151338429 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.759900 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.759937 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.760205 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.260196874 +0000 UTC m=+148.151716480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.769977 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2fgwq" event={"ID":"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d","Type":"ContainerStarted","Data":"cfb18de42552ebe1fc8ed1d00b9b865bf163a440efbda6c9605877bb6261685c"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.775783 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.779351 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.834741 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" 
podStartSLOduration=123.834727123 podStartE2EDuration="2m3.834727123s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:40.833307443 +0000 UTC m=+147.724827049" watchObservedRunningTime="2025-09-29 12:42:40.834727123 +0000 UTC m=+147.726246729" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.861055 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.861369 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.361355458 +0000 UTC m=+148.252875054 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.892046 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-44sf4" podStartSLOduration=122.891964304 podStartE2EDuration="2m2.891964304s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:40.886098542 +0000 UTC m=+147.777618138" watchObservedRunningTime="2025-09-29 12:42:40.891964304 +0000 UTC m=+147.783483910" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.897398 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" event={"ID":"7a5592e9-b443-4f48-b970-8e7b52c722f0","Type":"ContainerStarted","Data":"d18dea7113d3ab28c18c9c6d208db7378c3d8d76e0272b03fa1c3415e8f9ab6b"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.950852 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.960124 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.962744 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:40 crc kubenswrapper[4611]: E0929 12:42:40.963008 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.462996976 +0000 UTC m=+148.354516582 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.969948 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.979479 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" event={"ID":"b5166b57-5645-49e8-870c-108f64403797","Type":"ContainerStarted","Data":"387353fba794ff5f2a5cf0a2549024b0232ae6ecf650b9fe4f441d86c4305a50"} Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.994659 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 12:42:40 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld Sep 29 12:42:40 crc kubenswrapper[4611]: [+]process-running ok Sep 29 12:42:40 crc kubenswrapper[4611]: healthz check failed Sep 29 12:42:40 crc kubenswrapper[4611]: I0929 12:42:40.994712 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.005082 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" event={"ID":"ca3bb61b-97fa-4e78-b84c-66b82267072d","Type":"ContainerStarted","Data":"ab775d3a34ab2df063c4b071ff04f0f83e5342e27c6414b1680baa441e06990e"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.041949 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" event={"ID":"615f5d32-538b-4c6a-8bb9-57eedec2a126","Type":"ContainerStarted","Data":"f94c35f42e39ee8d39b9535e4cddfa9fd9cfaeef830d25a322ca683cb5f1518d"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.063459 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.064592 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.564571662 +0000 UTC m=+148.456091288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.065906 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7lv98" event={"ID":"be6d0117-b4cb-4cf3-8ecf-7add7e040587","Type":"ContainerStarted","Data":"530a2445099a728570ce0e87a7621a619b144cc25e379ebf96a057a1dc76fc0d"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.075934 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" event={"ID":"e5af436c-0542-4160-8a3b-418382623820","Type":"ContainerStarted","Data":"8a6f0eace2c2326aa7c9da3255a163bc3c4d79a6ec241c54736ea49772deaa2c"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.081046 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" podStartSLOduration=123.081031966 podStartE2EDuration="2m3.081031966s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:41.080394299 +0000 UTC m=+147.971913915" watchObservedRunningTime="2025-09-29 12:42:41.081031966 +0000 UTC m=+147.972551572" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.082040 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wwvsv" podStartSLOduration=123.082032904 podStartE2EDuration="2m3.082032904s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:40.994949708 +0000 UTC m=+147.886469314" watchObservedRunningTime="2025-09-29 12:42:41.082032904 +0000 UTC m=+147.973552510" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.088068 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" event={"ID":"14166aa7-7554-4165-9a14-f222a13d3c82","Type":"ContainerStarted","Data":"cd99cb948721086295875fadbfb0d69837a3f55809291757757d7c8f546f4df1"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.088111 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" 
event={"ID":"14166aa7-7554-4165-9a14-f222a13d3c82","Type":"ContainerStarted","Data":"8035686dcc2b281da2c112273b425967d3c536015d50bc5b6afa2b56fe796df9"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.127022 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" event={"ID":"f5f3de88-84fb-42b9-953f-74f002a8af28","Type":"ContainerStarted","Data":"e48c88a4491ff6bf7c65e125c335cb591afa74e74afaa8327481904e3746d569"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.127949 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.147011 4611 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-qght9 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.147088 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.153968 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" event={"ID":"03b1269f-c666-43fc-b39b-30a0174f3a38","Type":"ContainerStarted","Data":"53b14077dfd5326855c1dd6f0d4a98832a7b461ec51ac957d8f2a0b30754478c"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.156091 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" event={"ID":"00b46678-03aa-4e70-af99-c622eb2b2508","Type":"ContainerStarted","Data":"6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7"} Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.156142 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.158233 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.158272 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.173481 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.174509 4611 patch_prober.go:28] interesting 
pod/marketplace-operator-79b997595-rcs29 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.174561 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.175693 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.675679191 +0000 UTC m=+148.567198797 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.179351 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9trd8" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.190266 4611 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-t4mbb container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.190325 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" podUID="cf70983e-cbf7-4c9d-ab94-2aceade70418" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.190404 4611 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-t4mbb container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.190419 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" podUID="cf70983e-cbf7-4c9d-ab94-2aceade70418" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.195465 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" podStartSLOduration=124.195448697 podStartE2EDuration="2m4.195448697s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:41.194904252 +0000 UTC m=+148.086423868" watchObservedRunningTime="2025-09-29 12:42:41.195448697 +0000 UTC m=+148.086968313" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.231449 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-7lv98" podStartSLOduration=9.231434321 podStartE2EDuration="9.231434321s" podCreationTimestamp="2025-09-29 12:42:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:41.230494785 +0000 UTC m=+148.122014381" watchObservedRunningTime="2025-09-29 12:42:41.231434321 +0000 UTC m=+148.122953927" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.276799 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.278114 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.77809896 +0000 UTC m=+148.669618566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.348202 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" podStartSLOduration=124.348186726 podStartE2EDuration="2m4.348186726s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:41.300024126 +0000 UTC m=+148.191543732" watchObservedRunningTime="2025-09-29 12:42:41.348186726 +0000 UTC m=+148.239706332" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.378324 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.378655 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.878610757 +0000 UTC m=+148.770130363 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.442098 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.442904 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.454185 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" podStartSLOduration=123.454170304 podStartE2EDuration="2m3.454170304s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:41.450800581 +0000 UTC m=+148.342320197" watchObservedRunningTime="2025-09-29 12:42:41.454170304 +0000 UTC m=+148.345689910" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.460320 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.473431 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.483309 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.483503 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.483543 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.483701 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:41.983685199 +0000 UTC m=+148.875204805 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.493192 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.584498 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.584538 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.584571 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.585121 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.085105611 +0000 UTC m=+148.976625217 (durationBeforeRetry 500ms). 
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.585170 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.663963 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.686104 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.687253 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.187236102 +0000 UTC m=+149.078755708 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.687454 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.687749 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.187741826 +0000 UTC m=+149.079261432 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.701949 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-h94vp" podStartSLOduration=123.701926248 podStartE2EDuration="2m3.701926248s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:41.599010315 +0000 UTC m=+148.490529931" watchObservedRunningTime="2025-09-29 12:42:41.701926248 +0000 UTC m=+148.593445854"
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.775014 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.798132 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.798381 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.29831569 +0000 UTC m=+149.189835306 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.798806 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.799132 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.299117423 +0000 UTC m=+149.190637029 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
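[editor's note] Each failed operation above is parked behind a retry deadline, which is what the nestedpendingoperations.go records log as "No retries permitted until ... (durationBeforeRetry 500ms)": until that instant, the same operation key is refused outright. A toy gate showing that behavior, under the simplifying assumption of a fixed 500ms delay as in these entries (the real kubelet manages per-operation backoff with more machinery):

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// backoffGate refuses an operation until its deadline passes; a failure
// re-arms the gate. Illustrative only, not the kubelet's implementation.
type backoffGate struct {
	notBefore time.Time
	delay     time.Duration
}

func (g *backoffGate) try(now time.Time, op func() error) error {
	if now.Before(g.notBefore) {
		return fmt.Errorf("no retries permitted until %s", g.notBefore.Format(time.RFC3339Nano))
	}
	if err := op(); err != nil {
		g.notBefore = now.Add(g.delay) // arm the retry window
		return err
	}
	return nil
}

func main() {
	g := &backoffGate{delay: 500 * time.Millisecond}
	mount := func() error { return errors.New("driver not registered") }
	now := time.Now()
	fmt.Println(g.try(now, mount))                            // fails, arms the gate
	fmt.Println(g.try(now.Add(100*time.Millisecond), mount))  // refused: inside the 500ms window
	fmt.Println(g.try(now.Add(600*time.Millisecond), mount))  // retried (and fails again here)
}
```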
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.900668 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.900877 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.400851603 +0000 UTC m=+149.292371209 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.900929 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:41 crc kubenswrapper[4611]: E0929 12:42:41.901266 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.401252064 +0000 UTC m=+149.292771680 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.987727 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 12:42:41 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld
Sep 29 12:42:41 crc kubenswrapper[4611]: [+]process-running ok
Sep 29 12:42:41 crc kubenswrapper[4611]: healthz check failed
Sep 29 12:42:41 crc kubenswrapper[4611]: I0929 12:42:41.987782 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.002057 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.002322 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.502308185 +0000 UTC m=+149.393827791 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
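[editor's note] The router's startup probe output above follows the standard healthz convention: "[-]" marks a failing check, "[+]" a passing one, and the prober keeps the start of the response body for the log line. A rough stand-in for an HTTP probe of that kind; the URL, port, and body limit here are illustrative assumptions, not values taken from this cluster:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// probeHTTP mimics an HTTP health probe: any status outside [200,400) is a
// failure, and the start of the body is captured for logging, like the
// "[-]backend-http failed: reason withheld" text above.
func probeHTTP(url string, timeout time.Duration) (ok bool, startOfBody string) {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		// Dial errors and timeouts also count as probe failures.
		return false, err.Error()
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 256))
	return resp.StatusCode >= 200 && resp.StatusCode < 400, string(body)
}

func main() {
	ok, body := probeHTTP("http://127.0.0.1:1936/healthz", 1*time.Second)
	fmt.Printf("ok=%v start-of-body=%q\n", ok, body)
}
```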
Sep 29 12:42:42 crc kubenswrapper[4611]: W0929 12:42:42.096016 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-9aecd23a9c8272495724dd9ff504e94d312ef0f6fb54da04ed9bc1ebe4d93520 WatchSource:0}: Error finding container 9aecd23a9c8272495724dd9ff504e94d312ef0f6fb54da04ed9bc1ebe4d93520: Status 404 returned error can't find the container with id 9aecd23a9c8272495724dd9ff504e94d312ef0f6fb54da04ed9bc1ebe4d93520
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.112127 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.112489 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.612475919 +0000 UTC m=+149.503995525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.219183 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.219334 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.71930256 +0000 UTC m=+149.610822166 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.219801 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.220225 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.720213065 +0000 UTC m=+149.611732671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.233701 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" event={"ID":"0eececdb-2bc0-45ec-88e3-7b64e2043876","Type":"ContainerStarted","Data":"712f451f03c3d43c11da6f0938cc7deb51aa2b7b7fdfef82fbd997a1845a1b17"}
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.249854 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1350da802246815d3feba3b1ad63d674d05e966b453cba4fafd484bdaa037537"}
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.260998 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9aecd23a9c8272495724dd9ff504e94d312ef0f6fb54da04ed9bc1ebe4d93520"}
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.268540 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" event={"ID":"a2434a6e-935b-4dde-ad5c-cbb8364c7034","Type":"ContainerStarted","Data":"bad2e1dea03c1fe70dfdfda4568bbacbee0ec6c762dd477f0138d51e019b2704"}
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.283967 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" event={"ID":"7a5592e9-b443-4f48-b970-8e7b52c722f0","Type":"ContainerStarted","Data":"16380403be377ad3a30660801d7a53647550eef4a67b4315521962f7aca12a62"}
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.285347 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" event={"ID":"947d6530-c9b4-45cb-9f52-d83f26bda55a","Type":"ContainerStarted","Data":"c59360be814fd3ffa556558921316c6da4510f36b000d21acf23e7ad3ce4d32c"}
pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" event={"ID":"947d6530-c9b4-45cb-9f52-d83f26bda55a","Type":"ContainerStarted","Data":"c59360be814fd3ffa556558921316c6da4510f36b000d21acf23e7ad3ce4d32c"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.286929 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" event={"ID":"2ec0a709-b839-4a69-a6fd-2aaa11fdf388","Type":"ContainerStarted","Data":"3bc8a1e5d08a166490fff06d7364a67c129d46683fb57e903db56929b5f27e9a"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.291147 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" event={"ID":"501f1b34-13aa-4b8c-8fac-d525ad0626b7","Type":"ContainerStarted","Data":"beb354b6d54558f9829956d108580aedbe6b2faa072b19121e4a050c243b7263"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.307949 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" event={"ID":"14166aa7-7554-4165-9a14-f222a13d3c82","Type":"ContainerStarted","Data":"4418af581e2257fc2f6bc82d530960848ab461f27ea51030a32e62675a348859"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.310317 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" event={"ID":"615f5d32-538b-4c6a-8bb9-57eedec2a126","Type":"ContainerStarted","Data":"d95b4b5b4ef75a2adfd80ec1100421fb7c51b939b5b47b9a2b05b692a936e675"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.310888 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.321241 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.321596 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.821582395 +0000 UTC m=+149.713102001 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.324709 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" event={"ID":"b5166b57-5645-49e8-870c-108f64403797","Type":"ContainerStarted","Data":"cc88ae98567dc511a729bd51de634c4cc968328a925dfbba99a13d11745aa471"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.333338 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" event={"ID":"40660608-6535-4d31-b8af-564e51dae5c8","Type":"ContainerStarted","Data":"afc77d9d78cabb041627152247745c0b2e953f22f110774deb40df0d243837f3"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.334174 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.338703 4611 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wg2qr container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" start-of-body= Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.338746 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" podUID="40660608-6535-4d31-b8af-564e51dae5c8" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.356951 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" event={"ID":"e5af436c-0542-4160-8a3b-418382623820","Type":"ContainerStarted","Data":"8c8c6aefab101a86d504d403c07502aff667504000117b8db5ead13f99ea1c6d"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.364772 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" event={"ID":"0842824a-309a-4133-a66d-c393cf09be7a","Type":"ContainerStarted","Data":"8520ba5b91cb9548d883086392be19d41802ec273512d21f94ddc509a22b5a03"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.375010 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2fgwq" event={"ID":"453ba89f-b2bb-4e7b-9f4b-fe5ed506ce9d","Type":"ContainerStarted","Data":"11f58d85af045eb1331742273d6c9dc81c4f6a98d3dec5eb8a721d3530b4d2e7"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.375580 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.386132 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" 
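[editor's note] The burst of "SyncLoop (PLEG): event for pod" records above is the pod lifecycle event generator relaying ContainerStarted notifications from the container runtime into the kubelet's sync loop. A schematic version of that event flow, with made-up types rather than the kubelet's own:

```go
package main

import "fmt"

// podLifecycleEvent mirrors the shape of the logged events: a pod, an event
// type such as "ContainerStarted", and a container or sandbox ID.
type podLifecycleEvent struct {
	PodID string
	Type  string
	Data  string
}

// syncLoop consumes runtime events and reacts to each one; here it only logs,
// where the real sync loop would trigger a pod sync.
func syncLoop(events <-chan podLifecycleEvent) {
	for ev := range events {
		fmt.Printf("SyncLoop (PLEG): event for pod %s: %s %s\n", ev.PodID, ev.Type, ev.Data)
	}
}

func main() {
	ch := make(chan podLifecycleEvent, 2)
	// Two events copied in spirit from the records above (IDs truncated).
	ch <- podLifecycleEvent{"openshift-dns/dns-default-2fgwq", "ContainerStarted", "11f58d85af04"}
	ch <- podLifecycleEvent{"openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6", "ContainerStarted", "cc88ae98567d"}
	close(ch)
	syncLoop(ch)
}
```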
event={"ID":"f7471b65-3a4e-461a-b10d-43c2b0715b36","Type":"ContainerStarted","Data":"05aa0cb0a5d587a70c6e1411155e7e7f979294835cad6d9345189851c3f246a3"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.392584 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" event={"ID":"d33d2c15-5789-4553-b00c-b9c2d5f332a2","Type":"ContainerStarted","Data":"bed35ece5d9606afb6f9809c3ccdeee8c237f5a0d00e9c88099f85181575351c"} Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.406851 4611 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rcs29 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.406897 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.422807 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.424799 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:42.924786896 +0000 UTC m=+149.816306502 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.432810 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-59sbq" podStartSLOduration=124.432793367 podStartE2EDuration="2m4.432793367s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.329861714 +0000 UTC m=+149.221381330" watchObservedRunningTime="2025-09-29 12:42:42.432793367 +0000 UTC m=+149.324312973" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.433544 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-kv8lp" podStartSLOduration=124.433539008 podStartE2EDuration="2m4.433539008s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.432113588 +0000 UTC m=+149.323633194" watchObservedRunningTime="2025-09-29 12:42:42.433539008 +0000 UTC m=+149.325058614" Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.524049 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.524791 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.024766527 +0000 UTC m=+149.916286133 (durationBeforeRetry 500ms). 
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.531494 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-ms74j" podStartSLOduration=124.531481253 podStartE2EDuration="2m4.531481253s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.528999794 +0000 UTC m=+149.420519390" watchObservedRunningTime="2025-09-29 12:42:42.531481253 +0000 UTC m=+149.423000859"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.625930 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.626227 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.12621512 +0000 UTC m=+150.017734726 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.640298 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-jmp7r" podStartSLOduration=124.640279418 podStartE2EDuration="2m4.640279418s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.639398384 +0000 UTC m=+149.530918010" watchObservedRunningTime="2025-09-29 12:42:42.640279418 +0000 UTC m=+149.531799024"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.727917 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.728104 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.228078764 +0000 UTC m=+150.119598380 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.728278 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.728589 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.228574767 +0000 UTC m=+150.120094373 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
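[editor's note] The "Observed pod startup duration" entries above are plain timestamp arithmetic: the reported SLO duration is the watch-observed running time minus the pod creation timestamp, and when no image pull happened (both pulling fields at the zero time) it equals the end-to-end duration. A small sketch reproducing the etcd-operator figure from the record above:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the etcd-operator entry; the layout matches
	// Go's default time.Time formatting used in these log fields.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-09-29 12:40:38 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-09-29 12:42:42.640279418 +0000 UTC")
	if err != nil {
		panic(err)
	}
	d := observed.Sub(created)
	// Prints "2m4.640279418s 124.640279418", matching podStartE2EDuration
	// and podStartSLOduration in the log line.
	fmt.Println(d, d.Seconds())
}
```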
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.815580 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" podStartSLOduration=124.81556205 podStartE2EDuration="2m4.81556205s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.808246108 +0000 UTC m=+149.699765714" watchObservedRunningTime="2025-09-29 12:42:42.81556205 +0000 UTC m=+149.707081656"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.817431 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k9jp6" podStartSLOduration=124.817412631 podStartE2EDuration="2m4.817412631s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.715101305 +0000 UTC m=+149.606620911" watchObservedRunningTime="2025-09-29 12:42:42.817412631 +0000 UTC m=+149.708932237"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.831090 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.831394 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.331379307 +0000 UTC m=+150.222898913 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.913254 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lxmhx" podStartSLOduration=124.913236018 podStartE2EDuration="2m4.913236018s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.912269862 +0000 UTC m=+149.803789468" watchObservedRunningTime="2025-09-29 12:42:42.913236018 +0000 UTC m=+149.804755624"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.913804 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" podStartSLOduration=124.913798684 podStartE2EDuration="2m4.913798684s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.871857805 +0000 UTC m=+149.763377411" watchObservedRunningTime="2025-09-29 12:42:42.913798684 +0000 UTC m=+149.805318290"
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.932449 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:42 crc kubenswrapper[4611]: E0929 12:42:42.932761 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.432749417 +0000 UTC m=+150.324269023 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.988609 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 12:42:42 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld
Sep 29 12:42:42 crc kubenswrapper[4611]: [+]process-running ok
Sep 29 12:42:42 crc kubenswrapper[4611]: healthz check failed
Sep 29 12:42:42 crc kubenswrapper[4611]: I0929 12:42:42.988682 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.016024 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b9wt8" podStartSLOduration=126.016006907 podStartE2EDuration="2m6.016006907s" podCreationTimestamp="2025-09-29 12:40:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:42.946858517 +0000 UTC m=+149.838378123" watchObservedRunningTime="2025-09-29 12:42:43.016006907 +0000 UTC m=+149.907526513"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.036234 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.036571 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.536550415 +0000 UTC m=+150.428070031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.081875 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" podStartSLOduration=125.081854846 podStartE2EDuration="2m5.081854846s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:43.018932788 +0000 UTC m=+149.910452394" watchObservedRunningTime="2025-09-29 12:42:43.081854846 +0000 UTC m=+149.973374452"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.137461 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.137792 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.637777231 +0000 UTC m=+150.529296837 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
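[editor's note] The alternating "operationExecutor.UnmountVolume started" / "operationExecutor.MountVolume started" pairs for the same PVC are one reconciler working from two views of the world: the old image-registry pod (UID 8f668bae-...) should no longer hold the volume, while the new one (UID 21d82b2d-...) should; both operations then fail on the same unregistered driver. A toy desired-vs-actual comparison in that spirit, with illustrative types:

```go
package main

import "fmt"

// volumeState holds the two views the reconciler compares on every pass:
// which pods should have the volume (desired) and which still do (actual).
type volumeState struct {
	desired map[string]bool
	actual  map[string]bool
}

// reconcile starts an unmount for every pod that holds the volume but should
// not, and a mount for every pod that should hold it but does not yet.
func (s *volumeState) reconcile(volume string) {
	for pod := range s.actual {
		if !s.desired[pod] {
			fmt.Printf("UnmountVolume started for volume %q pod %q\n", volume, pod)
		}
	}
	for pod := range s.desired {
		if !s.actual[pod] {
			fmt.Printf("MountVolume started for volume %q pod %q\n", volume, pod)
		}
	}
}

func main() {
	s := &volumeState{
		desired: map[string]bool{"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23": true}, // new registry pod
		actual:  map[string]bool{"8f668bae-612b-4b75-9490-919e737c6a3b": true}, // old registry pod
	}
	s.reconcile("pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8")
}
```

Until the CSI driver registers, neither side of the comparison can make progress, which is why both messages repeat on every 500ms retry window.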
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.155753 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mf6jx" podStartSLOduration=125.155734737 podStartE2EDuration="2m5.155734737s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:43.108903923 +0000 UTC m=+150.000423539" watchObservedRunningTime="2025-09-29 12:42:43.155734737 +0000 UTC m=+150.047254343"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.238498 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.238825 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.738811682 +0000 UTC m=+150.630331288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.243272 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-2fgwq" podStartSLOduration=11.243257075 podStartE2EDuration="11.243257075s" podCreationTimestamp="2025-09-29 12:42:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:43.172306505 +0000 UTC m=+150.063826121" watchObservedRunningTime="2025-09-29 12:42:43.243257075 +0000 UTC m=+150.134776681"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.243969 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" podStartSLOduration=125.243962124 podStartE2EDuration="2m5.243962124s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:43.221977957 +0000 UTC m=+150.113497563" watchObservedRunningTime="2025-09-29 12:42:43.243962124 +0000 UTC m=+150.135481730"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.266732 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wc9wj" podStartSLOduration=125.266716803 podStartE2EDuration="2m5.266716803s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:43.264425589 +0000 UTC m=+150.155945205" watchObservedRunningTime="2025-09-29 12:42:43.266716803 +0000 UTC m=+150.158236409"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.312263 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 12:42:43 crc kubenswrapper[4611]: W0929 12:42:43.344489 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod0bb9b9df_afc8_408c_b4f7_6a99c15969fc.slice/crio-37716f7f0614b1ed40814d545c1169bd7a85815a0a22977a67a743dceded0a73 WatchSource:0}: Error finding container 37716f7f0614b1ed40814d545c1169bd7a85815a0a22977a67a743dceded0a73: Status 404 returned error can't find the container with id 37716f7f0614b1ed40814d545c1169bd7a85815a0a22977a67a743dceded0a73
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.345364 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.345755 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.845742136 +0000 UTC m=+150.737261742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.845742136 +0000 UTC m=+150.737261742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.392997 4611 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qkkfv container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.393075 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" podUID="651fdc30-c679-4d23-a889-2cf5e377afb1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.398690 4611 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-t4mbb container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.398739 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" podUID="cf70983e-cbf7-4c9d-ab94-2aceade70418" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.398802 4611 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-qght9 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.398870 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.416490 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"79866de793fbd1e69018f88eff7a391cc8ae79391e28e889377c489c66c8e94a"} Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.436332 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f9f1474624f71444fcd391c22771d2960380b8d2c0acd1a9b4dbbb8c8fb83797"} Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.436390 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4cafb5aa87bf2007ddfffaa5529acdd8748c5ec6cbadc4c7a64fda960645799a"} Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.444444 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0bb9b9df-afc8-408c-b4f7-6a99c15969fc","Type":"ContainerStarted","Data":"37716f7f0614b1ed40814d545c1169bd7a85815a0a22977a67a743dceded0a73"} Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.446152 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.446463 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:43.946447248 +0000 UTC m=+150.837966854 (durationBeforeRetry 500ms). 
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.467007 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d38b94dfe8c93274396487d6efc670b591cae8b776a5b8f0470813200773cdcd"}
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.467047 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.474003 4611 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wg2qr container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" start-of-body=
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.474063 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" podUID="40660608-6535-4d31-b8af-564e51dae5c8" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused"
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.549884 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.554066 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.05405332 +0000 UTC m=+150.945572926 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.657082 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.657489 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.157474967 +0000 UTC m=+151.048994573 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.759401 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.759788 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.259762521 +0000 UTC m=+151.151282157 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.861715 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.862158 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.36214335 +0000 UTC m=+151.253662946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.963789 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:43 crc kubenswrapper[4611]: E0929 12:42:43.964418 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.464405494 +0000 UTC m=+151.355925100 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.993997 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 12:42:43 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld Sep 29 12:42:43 crc kubenswrapper[4611]: [+]process-running ok Sep 29 12:42:43 crc kubenswrapper[4611]: healthz check failed Sep 29 12:42:43 crc kubenswrapper[4611]: I0929 12:42:43.994053 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.066083 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.066501 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.566477834 +0000 UTC m=+151.457997430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.167267 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.167589 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.667576957 +0000 UTC m=+151.559096563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.220844 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.268240 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.268455 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.768423422 +0000 UTC m=+151.659943028 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.268531 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.269039 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.769022269 +0000 UTC m=+151.660541875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.369698 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.370468 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.870452891 +0000 UTC m=+151.761972497 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.471322 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.471709 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:44.971694177 +0000 UTC m=+151.863213783 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.472997 4611 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-t4mbb container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.473027 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" podUID="cf70983e-cbf7-4c9d-ab94-2aceade70418" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.473128 4611 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qkkfv container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.473147 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv" podUID="651fdc30-c679-4d23-a889-2cf5e377afb1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.480907 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-p68fp" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.491120 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.491740 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.532841 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0bb9b9df-afc8-408c-b4f7-6a99c15969fc","Type":"ContainerStarted","Data":"0f27413fc0963a5241d946f8e7eb324844f04bc20cb2160dffe95f8dfc1e3269"} Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.546664 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.554954 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" 
event={"ID":"947d6530-c9b4-45cb-9f52-d83f26bda55a","Type":"ContainerStarted","Data":"bfb007652da03018461999fb19b5c62302c1e4a1a0db23f1934fa44aafef3ecb"} Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.554991 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" event={"ID":"947d6530-c9b4-45cb-9f52-d83f26bda55a","Type":"ContainerStarted","Data":"34538065425a5422022693a2df44f6cd9728537e2d6c1678a2b97b8ea5ff28d5"} Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.578971 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.580329 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.080313368 +0000 UTC m=+151.971832974 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.624341 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.624324134 podStartE2EDuration="3.624324134s" podCreationTimestamp="2025-09-29 12:42:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:44.624279342 +0000 UTC m=+151.515798948" watchObservedRunningTime="2025-09-29 12:42:44.624324134 +0000 UTC m=+151.515843740" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.645966 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wg2qr" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.680980 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.681701 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.181674468 +0000 UTC m=+152.073194114 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.782060 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.782271 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.282241636 +0000 UTC m=+152.173761242 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.782316 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.782694 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.282684058 +0000 UTC m=+152.174203704 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.883276 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.883409 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.38339033 +0000 UTC m=+152.274909936 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.883506 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.883777 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.3837681 +0000 UTC m=+152.275287706 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.983457 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 12:42:44 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld Sep 29 12:42:44 crc kubenswrapper[4611]: [+]process-running ok Sep 29 12:42:44 crc kubenswrapper[4611]: healthz check failed Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.983534 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.984108 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.984250 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.484223585 +0000 UTC m=+152.375743191 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:44 crc kubenswrapper[4611]: I0929 12:42:44.984291 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:44 crc kubenswrapper[4611]: E0929 12:42:44.984608 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.484596876 +0000 UTC m=+152.376116492 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.085786 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.085924 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.585905504 +0000 UTC m=+152.477425120 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.086343 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.086727 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.586715307 +0000 UTC m=+152.478234923 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.187054 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.187257 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.687235733 +0000 UTC m=+152.578755339 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.187620 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.187962 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.687937693 +0000 UTC m=+152.579457299 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.208506 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-t4mbb" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.219708 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.219765 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.221222 4611 patch_prober.go:28] interesting pod/console-f9d7485db-c7v9z container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.221268 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-c7v9z" podUID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.288308 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.288627 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.788593703 +0000 UTC m=+152.680113319 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.288978 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.299791 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.799769872 +0000 UTC m=+152.691289478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.349564 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.350240 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.357103 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.357153 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.357446 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.357471 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 
12:42:45.358649 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.389937 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.390163 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.890140498 +0000 UTC m=+152.781660104 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.390420 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.390754 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.890746565 +0000 UTC m=+152.782266171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.491213 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.491387 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.991359684 +0000 UTC m=+152.882879290 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.491792 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.492126 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:45.992116115 +0000 UTC m=+152.883635791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.545022 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rxkx4"] Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.546092 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.548752 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.560307 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" event={"ID":"947d6530-c9b4-45cb-9f52-d83f26bda55a","Type":"ContainerStarted","Data":"805bf43a407a0e93db02c6fe96fcf98f64bc78a7f6ca6699b721b74819a34984"} Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.561830 4611 generic.go:334] "Generic (PLEG): container finished" podID="0bb9b9df-afc8-408c-b4f7-6a99c15969fc" containerID="0f27413fc0963a5241d946f8e7eb324844f04bc20cb2160dffe95f8dfc1e3269" exitCode=0 Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.562263 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0bb9b9df-afc8-408c-b4f7-6a99c15969fc","Type":"ContainerDied","Data":"0f27413fc0963a5241d946f8e7eb324844f04bc20cb2160dffe95f8dfc1e3269"} Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.568174 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-g85mz" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.572845 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rxkx4"] Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.592714 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.592841 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.092814447 +0000 UTC m=+152.984334063 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.592870 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6s26\" (UniqueName: \"kubernetes.io/projected/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-kube-api-access-x6s26\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.592902 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-catalog-content\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.592950 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.592983 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-utilities\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.593239 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.093227988 +0000 UTC m=+152.984747684 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.593883 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7q4jc" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.693900 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.694089 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.194063634 +0000 UTC m=+153.085583240 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.694191 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-catalog-content\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.694369 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.694429 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-utilities\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.694592 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6s26\" (UniqueName: \"kubernetes.io/projected/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-kube-api-access-x6s26\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 
crc kubenswrapper[4611]: I0929 12:42:45.696020 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-catalog-content\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.696464 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-utilities\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.696881 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.196873122 +0000 UTC m=+153.088392728 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.758917 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6s26\" (UniqueName: \"kubernetes.io/projected/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-kube-api-access-x6s26\") pod \"certified-operators-rxkx4\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.771806 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v58w2"] Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.772622 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.778972 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.796178 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.797046 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-catalog-content\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.797181 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-utilities\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.797299 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsdtp\" (UniqueName: \"kubernetes.io/projected/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-kube-api-access-dsdtp\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.797579 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.297556713 +0000 UTC m=+153.189076319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.818440 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v58w2"]
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.829369 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-vbvkv" podStartSLOduration=12.829352651 podStartE2EDuration="12.829352651s" podCreationTimestamp="2025-09-29 12:42:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:45.828463707 +0000 UTC m=+152.719983313" watchObservedRunningTime="2025-09-29 12:42:45.829352651 +0000 UTC m=+152.720872257"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.859064 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxkx4"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.899382 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsdtp\" (UniqueName: \"kubernetes.io/projected/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-kube-api-access-dsdtp\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.899456 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.899514 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-catalog-content\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.899542 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-utilities\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.899956 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-utilities\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: E0929 12:42:45.900502 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.400488036 +0000 UTC m=+153.292007652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.900902 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-catalog-content\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.944052 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9djgx"]
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.945051 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.976225 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsdtp\" (UniqueName: \"kubernetes.io/projected/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-kube-api-access-dsdtp\") pod \"community-operators-v58w2\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.983506 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9djgx"]
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.984963 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-6bg7b"
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.995225 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 12:42:45 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld
Sep 29 12:42:45 crc kubenswrapper[4611]: [+]process-running ok
Sep 29 12:42:45 crc kubenswrapper[4611]: healthz check failed
Sep 29 12:42:45 crc kubenswrapper[4611]: I0929 12:42:45.995289 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.001929 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.002103 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-utilities\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.002125 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-catalog-content\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.002186 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgc45\" (UniqueName: \"kubernetes.io/projected/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-kube-api-access-rgc45\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.002279 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.502263248 +0000 UTC m=+153.393782854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.094038 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v58w2"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.104719 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.104778 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-utilities\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.104799 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-catalog-content\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.104891 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgc45\" (UniqueName: \"kubernetes.io/projected/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-kube-api-access-rgc45\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.105140 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.605122239 +0000 UTC m=+153.496641845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.105679 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-catalog-content\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.106109 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-utilities\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.113264 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mwfrw"]
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.114196 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.148132 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwfrw"]
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.150038 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgc45\" (UniqueName: \"kubernetes.io/projected/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-kube-api-access-rgc45\") pod \"certified-operators-9djgx\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.176006 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.207163 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.207381 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5smjw\" (UniqueName: \"kubernetes.io/projected/e26ad2ad-57ca-4097-ad5f-08162afc62b6-kube-api-access-5smjw\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.207453 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-utilities\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.207487 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-catalog-content\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.207576 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.707562679 +0000 UTC m=+153.599082285 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.250711 4611 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.288178 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9djgx"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.308476 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5smjw\" (UniqueName: \"kubernetes.io/projected/e26ad2ad-57ca-4097-ad5f-08162afc62b6-kube-api-access-5smjw\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.308538 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.308570 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-utilities\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.308603 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-catalog-content\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.309001 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-catalog-content\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.309740 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-utilities\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.310163 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.810146482 +0000 UTC m=+153.701666118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.354579 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5smjw\" (UniqueName: \"kubernetes.io/projected/e26ad2ad-57ca-4097-ad5f-08162afc62b6-kube-api-access-5smjw\") pod \"community-operators-mwfrw\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.418116 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.418455 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:46.918439334 +0000 UTC m=+153.809958930 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.447411 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qkkfv"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.452897 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwfrw"
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.525900 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.527812 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:47.02761216 +0000 UTC m=+153.919131766 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.628154 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.629382 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:47.1293668 +0000 UTC m=+154.020886406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.702563 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rxkx4"]
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.730238 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.730505 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:47.230494134 +0000 UTC m=+154.122013740 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.794668 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v58w2"]
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.832304 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.832612 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 12:42:47.332597995 +0000 UTC m=+154.224117601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.896934 4611 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T12:42:46.250739481Z","Handler":null,"Name":""}
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.933685 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:46 crc kubenswrapper[4611]: E0929 12:42:46.933943 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 12:42:47.433933174 +0000 UTC m=+154.325452780 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nt8jb" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.989820 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 12:42:46 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld
Sep 29 12:42:46 crc kubenswrapper[4611]: [+]process-running ok
Sep 29 12:42:46 crc kubenswrapper[4611]: healthz check failed
Sep 29 12:42:46 crc kubenswrapper[4611]: I0929 12:42:46.989874 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.023929 4611 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.024246 4611 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.037031 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.063847 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9djgx"]
Sep 29 12:42:47 crc kubenswrapper[4611]: W0929 12:42:47.086652 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c14a4b9_4eeb_4174_8343_5ba7e2a2234d.slice/crio-314ac21b0525302f0dc47dc141cd169fff2fe181e838ed97d83194c4014a404b WatchSource:0}: Error finding container 314ac21b0525302f0dc47dc141cd169fff2fe181e838ed97d83194c4014a404b: Status 404 returned error can't find the container with id 314ac21b0525302f0dc47dc141cd169fff2fe181e838ed97d83194c4014a404b
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.119530 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.139124 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.167110 4611 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.167143 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.339251 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwfrw"]
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.352003 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nt8jb\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:47 crc kubenswrapper[4611]: W0929 12:42:47.357001 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode26ad2ad_57ca_4097_ad5f_08162afc62b6.slice/crio-59cd5bee2dbee659fa78c4a790f5b5cebde9f1f92c336c7293148732bf609bbc WatchSource:0}: Error finding container 59cd5bee2dbee659fa78c4a790f5b5cebde9f1f92c336c7293148732bf609bbc: Status 404 returned error can't find the container with id 59cd5bee2dbee659fa78c4a790f5b5cebde9f1f92c336c7293148732bf609bbc
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.421671 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.448565 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kube-api-access\") pod \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") "
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.448652 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kubelet-dir\") pod \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\" (UID: \"0bb9b9df-afc8-408c-b4f7-6a99c15969fc\") "
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.448950 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0bb9b9df-afc8-408c-b4f7-6a99c15969fc" (UID: "0bb9b9df-afc8-408c-b4f7-6a99c15969fc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.456018 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0bb9b9df-afc8-408c-b4f7-6a99c15969fc" (UID: "0bb9b9df-afc8-408c-b4f7-6a99c15969fc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.460260 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.510738 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kd7t4"]
Sep 29 12:42:47 crc kubenswrapper[4611]: E0929 12:42:47.511201 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb9b9df-afc8-408c-b4f7-6a99c15969fc" containerName="pruner"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.511214 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb9b9df-afc8-408c-b4f7-6a99c15969fc" containerName="pruner"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.511315 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bb9b9df-afc8-408c-b4f7-6a99c15969fc" containerName="pruner"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.512134 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.516639 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.531704 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd7t4"]
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.550434 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8ck7\" (UniqueName: \"kubernetes.io/projected/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-kube-api-access-t8ck7\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.550497 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-catalog-content\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.550561 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-utilities\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.550609 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.550624 4611 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb9b9df-afc8-408c-b4f7-6a99c15969fc-kubelet-dir\") on node \"crc\" DevicePath \"\""
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.586876 4611 generic.go:334] "Generic (PLEG): container finished" podID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerID="f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817" exitCode=0
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.587559 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwfrw" event={"ID":"e26ad2ad-57ca-4097-ad5f-08162afc62b6","Type":"ContainerDied","Data":"f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.587609 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwfrw" event={"ID":"e26ad2ad-57ca-4097-ad5f-08162afc62b6","Type":"ContainerStarted","Data":"59cd5bee2dbee659fa78c4a790f5b5cebde9f1f92c336c7293148732bf609bbc"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.588296 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.593750 4611 generic.go:334] "Generic (PLEG): container finished" podID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerID="ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd" exitCode=0
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.593857 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9djgx" event={"ID":"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d","Type":"ContainerDied","Data":"ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.593907 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9djgx" event={"ID":"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d","Type":"ContainerStarted","Data":"314ac21b0525302f0dc47dc141cd169fff2fe181e838ed97d83194c4014a404b"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.597298 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.599001 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0bb9b9df-afc8-408c-b4f7-6a99c15969fc","Type":"ContainerDied","Data":"37716f7f0614b1ed40814d545c1169bd7a85815a0a22977a67a743dceded0a73"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.599043 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37716f7f0614b1ed40814d545c1169bd7a85815a0a22977a67a743dceded0a73"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.611025 4611 generic.go:334] "Generic (PLEG): container finished" podID="d33d2c15-5789-4553-b00c-b9c2d5f332a2" containerID="bed35ece5d9606afb6f9809c3ccdeee8c237f5a0d00e9c88099f85181575351c" exitCode=0
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.611113 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" event={"ID":"d33d2c15-5789-4553-b00c-b9c2d5f332a2","Type":"ContainerDied","Data":"bed35ece5d9606afb6f9809c3ccdeee8c237f5a0d00e9c88099f85181575351c"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.621960 4611 generic.go:334] "Generic (PLEG): container finished" podID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerID="3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069" exitCode=0
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.622148 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v58w2" event={"ID":"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e","Type":"ContainerDied","Data":"3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.622178 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v58w2" event={"ID":"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e","Type":"ContainerStarted","Data":"27f6980f28e37db79778cec7d131725aa6f3572f72eabd1588e6e6d99548d397"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.630990 4611 generic.go:334] "Generic (PLEG): container finished" podID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerID="ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e" exitCode=0
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.631928 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxkx4" event={"ID":"264d8c44-48fe-46fa-a3c9-d0df2dc4837a","Type":"ContainerDied","Data":"ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.631954 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxkx4" event={"ID":"264d8c44-48fe-46fa-a3c9-d0df2dc4837a","Type":"ContainerStarted","Data":"8d9ec578036bb871b345b764f93677bdef2551cd2d15a95499f1c5b6c289a84f"}
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.651229 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-utilities\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.651397 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8ck7\" (UniqueName: \"kubernetes.io/projected/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-kube-api-access-t8ck7\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.651452 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-catalog-content\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.659263 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-catalog-content\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.659474 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-utilities\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.705938 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8ck7\" (UniqueName: \"kubernetes.io/projected/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-kube-api-access-t8ck7\") pod \"redhat-marketplace-kd7t4\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") " pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.769810 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.829311 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.869662 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nt8jb"]
Sep 29 12:42:47 crc kubenswrapper[4611]: W0929 12:42:47.892500 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21d82b2d_3a11_4e88_9b00_cb9a9a62fa23.slice/crio-059bccec51b0521c9cc6d67a4f4fde8ba8d93e468cebddb5074e60138330facd WatchSource:0}: Error finding container 059bccec51b0521c9cc6d67a4f4fde8ba8d93e468cebddb5074e60138330facd: Status 404 returned error can't find the container with id 059bccec51b0521c9cc6d67a4f4fde8ba8d93e468cebddb5074e60138330facd
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.902436 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jdhsg"]
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.909656 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.924538 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdhsg"]
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.962828 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-catalog-content\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.962887 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-utilities\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.962925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcvnv\" (UniqueName: \"kubernetes.io/projected/2a2e9132-98ce-49b0-967c-e3d31eee618a-kube-api-access-qcvnv\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.995459 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 12:42:47 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld
Sep 29 12:42:47 crc kubenswrapper[4611]: [+]process-running ok
Sep 29 12:42:47 crc kubenswrapper[4611]: healthz check failed
Sep 29 12:42:47 crc kubenswrapper[4611]: I0929 12:42:47.995504 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.064684 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-catalog-content\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.064727 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-utilities\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.064764 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcvnv\" (UniqueName: \"kubernetes.io/projected/2a2e9132-98ce-49b0-967c-e3d31eee618a-kube-api-access-qcvnv\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.065383 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-catalog-content\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.065588 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-utilities\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.084724 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcvnv\" (UniqueName: \"kubernetes.io/projected/2a2e9132-98ce-49b0-967c-e3d31eee618a-kube-api-access-qcvnv\") pod \"redhat-marketplace-jdhsg\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.107360 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd7t4"]
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.240988 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdhsg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.437203 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdhsg"]
Sep 29 12:42:48 crc kubenswrapper[4611]: W0929 12:42:48.458311 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a2e9132_98ce_49b0_967c_e3d31eee618a.slice/crio-a66b343d6f53a3ec463856aed16cb79690af36d546841e9bd3c3c98561dac228 WatchSource:0}: Error finding container a66b343d6f53a3ec463856aed16cb79690af36d546841e9bd3c3c98561dac228: Status 404 returned error can't find the container with id a66b343d6f53a3ec463856aed16cb79690af36d546841e9bd3c3c98561dac228
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.643601 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd7t4" event={"ID":"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0","Type":"ContainerStarted","Data":"1401d83e703968448184f967e2f025b8414b71189dfd0f91aee0ae9de161759e"}
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.644981 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" event={"ID":"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23","Type":"ContainerStarted","Data":"21198cc8a170ee3b97a66ddef1dbb989a6b3ef1fba27b25183b2e1234c4e72ef"}
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.645012 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" event={"ID":"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23","Type":"ContainerStarted","Data":"059bccec51b0521c9cc6d67a4f4fde8ba8d93e468cebddb5074e60138330facd"}
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.646052 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdhsg" event={"ID":"2a2e9132-98ce-49b0-967c-e3d31eee618a","Type":"ContainerStarted","Data":"a66b343d6f53a3ec463856aed16cb79690af36d546841e9bd3c3c98561dac228"}
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.877423 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.907553 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mzmvg"]
Sep 29 12:42:48 crc kubenswrapper[4611]: E0929 12:42:48.907782 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d33d2c15-5789-4553-b00c-b9c2d5f332a2" containerName="collect-profiles"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.907795 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d33d2c15-5789-4553-b00c-b9c2d5f332a2" containerName="collect-profiles"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.907883 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d33d2c15-5789-4553-b00c-b9c2d5f332a2" containerName="collect-profiles"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.908570 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.911477 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.964250 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mzmvg"]
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976012 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d33d2c15-5789-4553-b00c-b9c2d5f332a2-secret-volume\") pod \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") "
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976074 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d33d2c15-5789-4553-b00c-b9c2d5f332a2-config-volume\") pod \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") "
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976139 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbw9s\" (UniqueName: \"kubernetes.io/projected/d33d2c15-5789-4553-b00c-b9c2d5f332a2-kube-api-access-jbw9s\") pod \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\" (UID: \"d33d2c15-5789-4553-b00c-b9c2d5f332a2\") "
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976491 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-catalog-content\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976603 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-utilities\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976778 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ttkw\" (UniqueName: \"kubernetes.io/projected/2b22af10-abdb-4a44-bbb1-aa53980a366a-kube-api-access-9ttkw\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.976807 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d33d2c15-5789-4553-b00c-b9c2d5f332a2-config-volume" (OuterVolumeSpecName: "config-volume") pod "d33d2c15-5789-4553-b00c-b9c2d5f332a2" (UID: "d33d2c15-5789-4553-b00c-b9c2d5f332a2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.981456 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d33d2c15-5789-4553-b00c-b9c2d5f332a2-kube-api-access-jbw9s" (OuterVolumeSpecName: "kube-api-access-jbw9s") pod "d33d2c15-5789-4553-b00c-b9c2d5f332a2" (UID: "d33d2c15-5789-4553-b00c-b9c2d5f332a2"). InnerVolumeSpecName "kube-api-access-jbw9s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.983619 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 12:42:48 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld
Sep 29 12:42:48 crc kubenswrapper[4611]: [+]process-running ok
Sep 29 12:42:48 crc kubenswrapper[4611]: healthz check failed
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.983678 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 12:42:48 crc kubenswrapper[4611]: I0929 12:42:48.991985 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d33d2c15-5789-4553-b00c-b9c2d5f332a2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d33d2c15-5789-4553-b00c-b9c2d5f332a2" (UID: "d33d2c15-5789-4553-b00c-b9c2d5f332a2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.077822 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-catalog-content\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.077863 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-utilities\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.077919 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ttkw\" (UniqueName: \"kubernetes.io/projected/2b22af10-abdb-4a44-bbb1-aa53980a366a-kube-api-access-9ttkw\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.077982 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d33d2c15-5789-4553-b00c-b9c2d5f332a2-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.077994 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d33d2c15-5789-4553-b00c-b9c2d5f332a2-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.078005 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbw9s\" (UniqueName: \"kubernetes.io/projected/d33d2c15-5789-4553-b00c-b9c2d5f332a2-kube-api-access-jbw9s\") on node \"crc\" DevicePath \"\""
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.078790 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-catalog-content\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.079040 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-utilities\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.100783 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ttkw\" (UniqueName: \"kubernetes.io/projected/2b22af10-abdb-4a44-bbb1-aa53980a366a-kube-api-access-9ttkw\") pod \"redhat-operators-mzmvg\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.221525 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mzmvg"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.299167 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nx6hn"]
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.300052 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.324750 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nx6hn"]
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.381458 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-catalog-content\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.381510 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-utilities\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.381570 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq4sb\" (UniqueName: \"kubernetes.io/projected/b1ddfd2d-adff-45e7-818f-4e1ddd410769-kube-api-access-dq4sb\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.431297 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mzmvg"]
Sep 29 12:42:49 crc kubenswrapper[4611]: W0929 12:42:49.437807 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b22af10_abdb_4a44_bbb1_aa53980a366a.slice/crio-b58b912e0ed4aa11fd664a2e86a0d3cbda16a2d06c78858be3157d15b4fd4d55 WatchSource:0}: Error finding container b58b912e0ed4aa11fd664a2e86a0d3cbda16a2d06c78858be3157d15b4fd4d55: Status 404 returned error can't find the container with id b58b912e0ed4aa11fd664a2e86a0d3cbda16a2d06c78858be3157d15b4fd4d55
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.483141 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq4sb\" (UniqueName: \"kubernetes.io/projected/b1ddfd2d-adff-45e7-818f-4e1ddd410769-kube-api-access-dq4sb\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.483222 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-catalog-content\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.483271 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-utilities\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.484238 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-catalog-content\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.485172 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-utilities\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.503448 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq4sb\" (UniqueName: \"kubernetes.io/projected/b1ddfd2d-adff-45e7-818f-4e1ddd410769-kube-api-access-dq4sb\") pod \"redhat-operators-nx6hn\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.622018 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nx6hn"
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.673457 4611 generic.go:334] "Generic (PLEG): container finished" podID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerID="3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9" exitCode=0
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.673512 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdhsg" event={"ID":"2a2e9132-98ce-49b0-967c-e3d31eee618a","Type":"ContainerDied","Data":"3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9"}
Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.692506 4611 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.693585 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5" event={"ID":"d33d2c15-5789-4553-b00c-b9c2d5f332a2","Type":"ContainerDied","Data":"66949a20cb106a3e2134526d7eedff1d50fb87af715ba52e1c7303a5868da957"} Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.693673 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66949a20cb106a3e2134526d7eedff1d50fb87af715ba52e1c7303a5868da957" Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.721983 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerStarted","Data":"28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580"} Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.722027 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerStarted","Data":"b58b912e0ed4aa11fd664a2e86a0d3cbda16a2d06c78858be3157d15b4fd4d55"} Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.744802 4611 generic.go:334] "Generic (PLEG): container finished" podID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerID="9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828" exitCode=0 Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.764738 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd7t4" event={"ID":"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0","Type":"ContainerDied","Data":"9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828"} Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.764788 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.985928 4611 patch_prober.go:28] interesting pod/router-default-5444994796-6bg7b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 12:42:49 crc kubenswrapper[4611]: [-]has-synced failed: reason withheld Sep 29 12:42:49 crc kubenswrapper[4611]: [+]process-running ok Sep 29 12:42:49 crc kubenswrapper[4611]: healthz check failed Sep 29 12:42:49 crc kubenswrapper[4611]: I0929 12:42:49.986008 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6bg7b" podUID="d1ca0ac6-c50d-41d8-964e-5c065edd7197" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.077649 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" podStartSLOduration=132.077608483 podStartE2EDuration="2m12.077608483s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:42:49.843248529 +0000 UTC m=+156.734768135" watchObservedRunningTime="2025-09-29 12:42:50.077608483 +0000 UTC m=+156.969128089" Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 
12:42:50.080670 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nx6hn"] Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.756829 4611 generic.go:334] "Generic (PLEG): container finished" podID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerID="28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580" exitCode=0 Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.756920 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerDied","Data":"28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580"} Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.776361 4611 generic.go:334] "Generic (PLEG): container finished" podID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerID="fe9cb72a650acf93412ea06339afef7edef5cca42e88042e81ce56e7345ea850" exitCode=0 Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.777699 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerDied","Data":"fe9cb72a650acf93412ea06339afef7edef5cca42e88042e81ce56e7345ea850"} Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.777735 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerStarted","Data":"dc9880898955f640700d4b3b5ef0b0ea0cc7d2d7fb2e96ae5281d0a1901ce00b"} Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.921964 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.922762 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.932041 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.932365 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.957828 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 12:42:50 crc kubenswrapper[4611]: I0929 12:42:50.986881 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:50.996025 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-6bg7b" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.032792 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.032892 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.138662 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.138740 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.138849 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.186311 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.188808 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-2fgwq" Sep 29 12:42:51 crc 
kubenswrapper[4611]: I0929 12:42:51.302594 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:51 crc kubenswrapper[4611]: I0929 12:42:51.962081 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 12:42:52 crc kubenswrapper[4611]: I0929 12:42:52.832717 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"94612e18-a9da-4ea2-bf41-bc79d340a6dc","Type":"ContainerStarted","Data":"b68eb497fb3fc98fea2956a6adde579c9ecf8ad7f0c3874c007a6e07b36f4ae2"} Sep 29 12:42:53 crc kubenswrapper[4611]: I0929 12:42:53.892051 4611 generic.go:334] "Generic (PLEG): container finished" podID="94612e18-a9da-4ea2-bf41-bc79d340a6dc" containerID="63345059b24a39bc930b9ce8e8d8ce124dc6713777b43b04378c229f05b5bf46" exitCode=0 Sep 29 12:42:53 crc kubenswrapper[4611]: I0929 12:42:53.892108 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"94612e18-a9da-4ea2-bf41-bc79d340a6dc","Type":"ContainerDied","Data":"63345059b24a39bc930b9ce8e8d8ce124dc6713777b43b04378c229f05b5bf46"} Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.227569 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.242230 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.292742 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.356655 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.356697 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.356720 4611 patch_prober.go:28] interesting pod/downloads-7954f5f757-jjrl6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.356741 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jjrl6" podUID="fb819a41-a91e-439f-a34f-331f9731340a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.443394 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kubelet-dir\") pod \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " 
Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.444239 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kube-api-access\") pod \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\" (UID: \"94612e18-a9da-4ea2-bf41-bc79d340a6dc\") " Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.444690 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "94612e18-a9da-4ea2-bf41-bc79d340a6dc" (UID: "94612e18-a9da-4ea2-bf41-bc79d340a6dc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.479642 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "94612e18-a9da-4ea2-bf41-bc79d340a6dc" (UID: "94612e18-a9da-4ea2-bf41-bc79d340a6dc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.546132 4611 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.546171 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/94612e18-a9da-4ea2-bf41-bc79d340a6dc-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.941541 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.941556 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"94612e18-a9da-4ea2-bf41-bc79d340a6dc","Type":"ContainerDied","Data":"b68eb497fb3fc98fea2956a6adde579c9ecf8ad7f0c3874c007a6e07b36f4ae2"} Sep 29 12:42:55 crc kubenswrapper[4611]: I0929 12:42:55.942265 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b68eb497fb3fc98fea2956a6adde579c9ecf8ad7f0c3874c007a6e07b36f4ae2" Sep 29 12:43:00 crc kubenswrapper[4611]: I0929 12:43:00.326292 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:43:00 crc kubenswrapper[4611]: I0929 12:43:00.331110 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c2df08da-22ae-44b9-b568-06bafc65932c-metrics-certs\") pod \"network-metrics-daemon-xtjl8\" (UID: \"c2df08da-22ae-44b9-b568-06bafc65932c\") " pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:43:00 crc kubenswrapper[4611]: I0929 12:43:00.347529 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xtjl8" Sep 29 12:43:00 crc kubenswrapper[4611]: I0929 12:43:00.954526 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-xtjl8"] Sep 29 12:43:00 crc kubenswrapper[4611]: W0929 12:43:00.968720 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2df08da_22ae_44b9_b568_06bafc65932c.slice/crio-72a02257bd5b8f550330a7b88e66b843daad61d14942f153180cd2ceff6b8fb2 WatchSource:0}: Error finding container 72a02257bd5b8f550330a7b88e66b843daad61d14942f153180cd2ceff6b8fb2: Status 404 returned error can't find the container with id 72a02257bd5b8f550330a7b88e66b843daad61d14942f153180cd2ceff6b8fb2 Sep 29 12:43:00 crc kubenswrapper[4611]: I0929 12:43:00.995590 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" event={"ID":"c2df08da-22ae-44b9-b568-06bafc65932c","Type":"ContainerStarted","Data":"72a02257bd5b8f550330a7b88e66b843daad61d14942f153180cd2ceff6b8fb2"} Sep 29 12:43:03 crc kubenswrapper[4611]: I0929 12:43:03.024892 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" event={"ID":"c2df08da-22ae-44b9-b568-06bafc65932c","Type":"ContainerStarted","Data":"6236e2ad107265ce42d2efdd5d2f103a3695543febed01f963643ac9249e4a68"} Sep 29 12:43:04 crc kubenswrapper[4611]: I0929 12:43:04.040798 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xtjl8" event={"ID":"c2df08da-22ae-44b9-b568-06bafc65932c","Type":"ContainerStarted","Data":"48f6cbfbbb0530cd4a551cf9f4aeb09dd4372d62b04c53eca40872533dfba8d5"} Sep 29 12:43:04 crc kubenswrapper[4611]: I0929 12:43:04.058903 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-xtjl8" podStartSLOduration=146.058884311 podStartE2EDuration="2m26.058884311s" podCreationTimestamp="2025-09-29 12:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:43:04.055272911 +0000 UTC m=+170.946792527" watchObservedRunningTime="2025-09-29 12:43:04.058884311 +0000 UTC m=+170.950403917" Sep 29 12:43:04 crc kubenswrapper[4611]: I0929 12:43:04.628902 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:43:04 crc kubenswrapper[4611]: I0929 12:43:04.628979 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:43:05 crc kubenswrapper[4611]: I0929 12:43:05.362594 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-jjrl6" Sep 29 12:43:07 crc kubenswrapper[4611]: I0929 12:43:07.464785 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:43:16 crc kubenswrapper[4611]: I0929 12:43:16.398807 4611 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nk4d7" Sep 29 12:43:20 crc kubenswrapper[4611]: I0929 12:43:20.967738 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 12:43:28 crc kubenswrapper[4611]: E0929 12:43:28.906698 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 12:43:28 crc kubenswrapper[4611]: E0929 12:43:28.907409 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dq4sb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-nx6hn_openshift-marketplace(b1ddfd2d-adff-45e7-818f-4e1ddd410769): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:28 crc kubenswrapper[4611]: E0929 12:43:28.908591 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-nx6hn" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" Sep 29 12:43:30 crc kubenswrapper[4611]: E0929 12:43:30.431298 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-nx6hn" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" Sep 29 12:43:30 crc kubenswrapper[4611]: E0929 12:43:30.519407 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: 
context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 12:43:30 crc kubenswrapper[4611]: E0929 12:43:30.519751 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6s26,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-rxkx4_openshift-marketplace(264d8c44-48fe-46fa-a3c9-d0df2dc4837a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:30 crc kubenswrapper[4611]: E0929 12:43:30.520967 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-rxkx4" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" Sep 29 12:43:31 crc kubenswrapper[4611]: E0929 12:43:31.731650 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-rxkx4" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" Sep 29 12:43:32 crc kubenswrapper[4611]: E0929 12:43:32.103220 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 12:43:32 crc kubenswrapper[4611]: E0929 12:43:32.103439 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t8ck7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-kd7t4_openshift-marketplace(d4fbaa17-e5ab-45cd-ba85-23cc115b07c0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:32 crc kubenswrapper[4611]: E0929 12:43:32.104605 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-kd7t4" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.262009 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-kd7t4" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.481331 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.481792 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5smjw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-mwfrw_openshift-marketplace(e26ad2ad-57ca-4097-ad5f-08162afc62b6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.483587 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-mwfrw" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.531666 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.531833 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rgc45,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9djgx_openshift-marketplace(5c14a4b9-4eeb-4174-8343-5ba7e2a2234d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.534707 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9djgx" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.756574 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.756817 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9ttkw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mzmvg_openshift-marketplace(2b22af10-abdb-4a44-bbb1-aa53980a366a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.758071 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mzmvg" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.812826 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.813016 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qcvnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-jdhsg_openshift-marketplace(2a2e9132-98ce-49b0-967c-e3d31eee618a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:33 crc kubenswrapper[4611]: E0929 12:43:33.814854 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-jdhsg" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.064730 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.065153 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dsdtp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-v58w2_openshift-marketplace(50f7035c-f8b1-40ec-b2e9-fc3f470eec0e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.066928 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-v58w2" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.194497 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-jdhsg" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.194612 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-v58w2" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.194737 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9djgx" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" Sep 29 12:43:34 crc kubenswrapper[4611]: E0929 12:43:34.195212 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mzmvg" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" Sep 29 12:43:34 
crc kubenswrapper[4611]: E0929 12:43:34.195559 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-mwfrw" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" Sep 29 12:43:34 crc kubenswrapper[4611]: I0929 12:43:34.629226 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:43:34 crc kubenswrapper[4611]: I0929 12:43:34.629285 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:43:44 crc kubenswrapper[4611]: I0929 12:43:44.243582 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerStarted","Data":"accca1d3dfb2400834587245f0902c4f02ed80d5c6cbad9ac13cedec7e6014c2"} Sep 29 12:43:45 crc kubenswrapper[4611]: I0929 12:43:45.249914 4611 generic.go:334] "Generic (PLEG): container finished" podID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerID="accca1d3dfb2400834587245f0902c4f02ed80d5c6cbad9ac13cedec7e6014c2" exitCode=0 Sep 29 12:43:45 crc kubenswrapper[4611]: I0929 12:43:45.249982 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerDied","Data":"accca1d3dfb2400834587245f0902c4f02ed80d5c6cbad9ac13cedec7e6014c2"} Sep 29 12:43:46 crc kubenswrapper[4611]: I0929 12:43:46.258535 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerStarted","Data":"9184e8a57e484a3221d2b8d4c27da3e8d4c5008fad79d21d9bc5806a6df8958a"} Sep 29 12:43:46 crc kubenswrapper[4611]: I0929 12:43:46.261009 4611 generic.go:334] "Generic (PLEG): container finished" podID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerID="c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362" exitCode=0 Sep 29 12:43:46 crc kubenswrapper[4611]: I0929 12:43:46.261037 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxkx4" event={"ID":"264d8c44-48fe-46fa-a3c9-d0df2dc4837a","Type":"ContainerDied","Data":"c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362"} Sep 29 12:43:46 crc kubenswrapper[4611]: I0929 12:43:46.281245 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nx6hn" podStartSLOduration=2.29729606 podStartE2EDuration="57.281227774s" podCreationTimestamp="2025-09-29 12:42:49 +0000 UTC" firstStartedPulling="2025-09-29 12:42:50.781137287 +0000 UTC m=+157.672656893" lastFinishedPulling="2025-09-29 12:43:45.765069001 +0000 UTC m=+212.656588607" observedRunningTime="2025-09-29 12:43:46.279960059 +0000 UTC m=+213.171479675" watchObservedRunningTime="2025-09-29 12:43:46.281227774 +0000 UTC m=+213.172747380" Sep 29 
12:43:48 crc kubenswrapper[4611]: I0929 12:43:48.273898 4611 generic.go:334] "Generic (PLEG): container finished" podID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerID="370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1" exitCode=0 Sep 29 12:43:48 crc kubenswrapper[4611]: I0929 12:43:48.273965 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdhsg" event={"ID":"2a2e9132-98ce-49b0-967c-e3d31eee618a","Type":"ContainerDied","Data":"370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1"} Sep 29 12:43:48 crc kubenswrapper[4611]: I0929 12:43:48.277019 4611 generic.go:334] "Generic (PLEG): container finished" podID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerID="e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb" exitCode=0 Sep 29 12:43:48 crc kubenswrapper[4611]: I0929 12:43:48.277105 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd7t4" event={"ID":"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0","Type":"ContainerDied","Data":"e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb"} Sep 29 12:43:48 crc kubenswrapper[4611]: I0929 12:43:48.279369 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxkx4" event={"ID":"264d8c44-48fe-46fa-a3c9-d0df2dc4837a","Type":"ContainerStarted","Data":"34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc"} Sep 29 12:43:48 crc kubenswrapper[4611]: I0929 12:43:48.328070 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rxkx4" podStartSLOduration=3.329681749 podStartE2EDuration="1m3.328050415s" podCreationTimestamp="2025-09-29 12:42:45 +0000 UTC" firstStartedPulling="2025-09-29 12:42:47.633704643 +0000 UTC m=+154.525224249" lastFinishedPulling="2025-09-29 12:43:47.632073289 +0000 UTC m=+214.523592915" observedRunningTime="2025-09-29 12:43:48.325302489 +0000 UTC m=+215.216822095" watchObservedRunningTime="2025-09-29 12:43:48.328050415 +0000 UTC m=+215.219570021" Sep 29 12:43:49 crc kubenswrapper[4611]: I0929 12:43:49.288258 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdhsg" event={"ID":"2a2e9132-98ce-49b0-967c-e3d31eee618a","Type":"ContainerStarted","Data":"9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7"} Sep 29 12:43:49 crc kubenswrapper[4611]: I0929 12:43:49.290151 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerStarted","Data":"dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396"} Sep 29 12:43:49 crc kubenswrapper[4611]: I0929 12:43:49.292095 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd7t4" event={"ID":"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0","Type":"ContainerStarted","Data":"ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31"} Sep 29 12:43:49 crc kubenswrapper[4611]: I0929 12:43:49.314071 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jdhsg" podStartSLOduration=3.314664857 podStartE2EDuration="1m2.314046799s" podCreationTimestamp="2025-09-29 12:42:47 +0000 UTC" firstStartedPulling="2025-09-29 12:42:49.698799679 +0000 UTC m=+156.590319285" lastFinishedPulling="2025-09-29 12:43:48.698181621 +0000 UTC m=+215.589701227" 
observedRunningTime="2025-09-29 12:43:49.307732446 +0000 UTC m=+216.199252072" watchObservedRunningTime="2025-09-29 12:43:49.314046799 +0000 UTC m=+216.205566405" Sep 29 12:43:49 crc kubenswrapper[4611]: I0929 12:43:49.623287 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nx6hn" Sep 29 12:43:49 crc kubenswrapper[4611]: I0929 12:43:49.623691 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nx6hn" Sep 29 12:43:50 crc kubenswrapper[4611]: I0929 12:43:50.299141 4611 generic.go:334] "Generic (PLEG): container finished" podID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerID="dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396" exitCode=0 Sep 29 12:43:50 crc kubenswrapper[4611]: I0929 12:43:50.300022 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerDied","Data":"dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396"} Sep 29 12:43:50 crc kubenswrapper[4611]: I0929 12:43:50.318195 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kd7t4" podStartSLOduration=4.351375075 podStartE2EDuration="1m3.318176551s" podCreationTimestamp="2025-09-29 12:42:47 +0000 UTC" firstStartedPulling="2025-09-29 12:42:49.765871901 +0000 UTC m=+156.657391507" lastFinishedPulling="2025-09-29 12:43:48.732673377 +0000 UTC m=+215.624192983" observedRunningTime="2025-09-29 12:43:50.316896896 +0000 UTC m=+217.208416502" watchObservedRunningTime="2025-09-29 12:43:50.318176551 +0000 UTC m=+217.209696157" Sep 29 12:43:51 crc kubenswrapper[4611]: I0929 12:43:51.071069 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nx6hn" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="registry-server" probeResult="failure" output=< Sep 29 12:43:51 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 12:43:51 crc kubenswrapper[4611]: > Sep 29 12:43:52 crc kubenswrapper[4611]: I0929 12:43:52.310098 4611 generic.go:334] "Generic (PLEG): container finished" podID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerID="e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c" exitCode=0 Sep 29 12:43:52 crc kubenswrapper[4611]: I0929 12:43:52.310199 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9djgx" event={"ID":"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d","Type":"ContainerDied","Data":"e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c"} Sep 29 12:43:55 crc kubenswrapper[4611]: I0929 12:43:55.859388 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:43:55 crc kubenswrapper[4611]: I0929 12:43:55.861429 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:43:55 crc kubenswrapper[4611]: I0929 12:43:55.960967 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:43:56 crc kubenswrapper[4611]: I0929 12:43:56.507118 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:43:57 crc kubenswrapper[4611]: I0929 
12:43:57.830917 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kd7t4" Sep 29 12:43:57 crc kubenswrapper[4611]: I0929 12:43:57.830970 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kd7t4" Sep 29 12:43:57 crc kubenswrapper[4611]: I0929 12:43:57.868482 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kd7t4" Sep 29 12:43:58 crc kubenswrapper[4611]: I0929 12:43:58.241187 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jdhsg" Sep 29 12:43:58 crc kubenswrapper[4611]: I0929 12:43:58.241241 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jdhsg" Sep 29 12:43:58 crc kubenswrapper[4611]: I0929 12:43:58.277766 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jdhsg" Sep 29 12:43:58 crc kubenswrapper[4611]: I0929 12:43:58.412198 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kd7t4" Sep 29 12:43:58 crc kubenswrapper[4611]: I0929 12:43:58.449345 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jdhsg" Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.371856 4611 generic.go:334] "Generic (PLEG): container finished" podID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerID="dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7" exitCode=0 Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.371937 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwfrw" event={"ID":"e26ad2ad-57ca-4097-ad5f-08162afc62b6","Type":"ContainerDied","Data":"dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7"} Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.374847 4611 generic.go:334] "Generic (PLEG): container finished" podID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerID="1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e" exitCode=0 Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.374881 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v58w2" event={"ID":"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e","Type":"ContainerDied","Data":"1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e"} Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.377612 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9djgx" event={"ID":"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d","Type":"ContainerStarted","Data":"fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b"} Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.385362 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerStarted","Data":"067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6"} Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.420819 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9djgx" podStartSLOduration=3.660728795 podStartE2EDuration="1m14.420800923s" 
podCreationTimestamp="2025-09-29 12:42:45 +0000 UTC" firstStartedPulling="2025-09-29 12:42:47.595332293 +0000 UTC m=+154.486851899" lastFinishedPulling="2025-09-29 12:43:58.355404421 +0000 UTC m=+225.246924027" observedRunningTime="2025-09-29 12:43:59.412973268 +0000 UTC m=+226.304492904" watchObservedRunningTime="2025-09-29 12:43:59.420800923 +0000 UTC m=+226.312320529" Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.436519 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mzmvg" podStartSLOduration=2.841784595 podStartE2EDuration="1m11.436499333s" podCreationTimestamp="2025-09-29 12:42:48 +0000 UTC" firstStartedPulling="2025-09-29 12:42:49.729854206 +0000 UTC m=+156.621373812" lastFinishedPulling="2025-09-29 12:43:58.324568944 +0000 UTC m=+225.216088550" observedRunningTime="2025-09-29 12:43:59.433997405 +0000 UTC m=+226.325517031" watchObservedRunningTime="2025-09-29 12:43:59.436499333 +0000 UTC m=+226.328018939" Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.517401 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdhsg"] Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.663238 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nx6hn" Sep 29 12:43:59 crc kubenswrapper[4611]: I0929 12:43:59.718014 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nx6hn" Sep 29 12:44:00 crc kubenswrapper[4611]: I0929 12:44:00.393041 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwfrw" event={"ID":"e26ad2ad-57ca-4097-ad5f-08162afc62b6","Type":"ContainerStarted","Data":"4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6"} Sep 29 12:44:00 crc kubenswrapper[4611]: I0929 12:44:00.395207 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v58w2" event={"ID":"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e","Type":"ContainerStarted","Data":"a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382"} Sep 29 12:44:00 crc kubenswrapper[4611]: I0929 12:44:00.395432 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jdhsg" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="registry-server" containerID="cri-o://9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7" gracePeriod=2 Sep 29 12:44:00 crc kubenswrapper[4611]: I0929 12:44:00.419028 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mwfrw" podStartSLOduration=2.156334474 podStartE2EDuration="1m14.419010532s" podCreationTimestamp="2025-09-29 12:42:46 +0000 UTC" firstStartedPulling="2025-09-29 12:42:47.588023001 +0000 UTC m=+154.479542607" lastFinishedPulling="2025-09-29 12:43:59.850699059 +0000 UTC m=+226.742218665" observedRunningTime="2025-09-29 12:44:00.410519349 +0000 UTC m=+227.302038955" watchObservedRunningTime="2025-09-29 12:44:00.419010532 +0000 UTC m=+227.310530128" Sep 29 12:44:00 crc kubenswrapper[4611]: I0929 12:44:00.435601 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v58w2" podStartSLOduration=3.258329841 podStartE2EDuration="1m15.435585747s" podCreationTimestamp="2025-09-29 12:42:45 +0000 UTC" firstStartedPulling="2025-09-29 
12:42:47.626502894 +0000 UTC m=+154.518022500" lastFinishedPulling="2025-09-29 12:43:59.8037588 +0000 UTC m=+226.695278406" observedRunningTime="2025-09-29 12:44:00.432561704 +0000 UTC m=+227.324081310" watchObservedRunningTime="2025-09-29 12:44:00.435585747 +0000 UTC m=+227.327105353" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.310409 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdhsg" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.315105 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-utilities\") pod \"2a2e9132-98ce-49b0-967c-e3d31eee618a\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.315209 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcvnv\" (UniqueName: \"kubernetes.io/projected/2a2e9132-98ce-49b0-967c-e3d31eee618a-kube-api-access-qcvnv\") pod \"2a2e9132-98ce-49b0-967c-e3d31eee618a\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.315304 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-catalog-content\") pod \"2a2e9132-98ce-49b0-967c-e3d31eee618a\" (UID: \"2a2e9132-98ce-49b0-967c-e3d31eee618a\") " Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.316280 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-utilities" (OuterVolumeSpecName: "utilities") pod "2a2e9132-98ce-49b0-967c-e3d31eee618a" (UID: "2a2e9132-98ce-49b0-967c-e3d31eee618a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.319958 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a2e9132-98ce-49b0-967c-e3d31eee618a-kube-api-access-qcvnv" (OuterVolumeSpecName: "kube-api-access-qcvnv") pod "2a2e9132-98ce-49b0-967c-e3d31eee618a" (UID: "2a2e9132-98ce-49b0-967c-e3d31eee618a"). InnerVolumeSpecName "kube-api-access-qcvnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.339733 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a2e9132-98ce-49b0-967c-e3d31eee618a" (UID: "2a2e9132-98ce-49b0-967c-e3d31eee618a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.404599 4611 generic.go:334] "Generic (PLEG): container finished" podID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerID="9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7" exitCode=0 Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.404672 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdhsg" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.404674 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdhsg" event={"ID":"2a2e9132-98ce-49b0-967c-e3d31eee618a","Type":"ContainerDied","Data":"9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7"} Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.404740 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdhsg" event={"ID":"2a2e9132-98ce-49b0-967c-e3d31eee618a","Type":"ContainerDied","Data":"a66b343d6f53a3ec463856aed16cb79690af36d546841e9bd3c3c98561dac228"} Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.404762 4611 scope.go:117] "RemoveContainer" containerID="9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.416213 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.416237 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a2e9132-98ce-49b0-967c-e3d31eee618a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.416248 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcvnv\" (UniqueName: \"kubernetes.io/projected/2a2e9132-98ce-49b0-967c-e3d31eee618a-kube-api-access-qcvnv\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.423178 4611 scope.go:117] "RemoveContainer" containerID="370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.436740 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdhsg"] Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.447790 4611 scope.go:117] "RemoveContainer" containerID="3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.451547 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdhsg"] Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.462484 4611 scope.go:117] "RemoveContainer" containerID="9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7" Sep 29 12:44:01 crc kubenswrapper[4611]: E0929 12:44:01.463026 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7\": container with ID starting with 9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7 not found: ID does not exist" containerID="9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.463062 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7"} err="failed to get container status \"9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7\": rpc error: code = NotFound desc = could not find container \"9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7\": container with ID starting with 
9e81faee8af01224a7819832f67cc306fcad744cc6cefb484006c5c78e4579b7 not found: ID does not exist" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.463113 4611 scope.go:117] "RemoveContainer" containerID="370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1" Sep 29 12:44:01 crc kubenswrapper[4611]: E0929 12:44:01.463596 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1\": container with ID starting with 370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1 not found: ID does not exist" containerID="370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.463639 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1"} err="failed to get container status \"370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1\": rpc error: code = NotFound desc = could not find container \"370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1\": container with ID starting with 370c6aaedcbc631e7e9f1245dd5f6169644a24965e6456b3fb459c5b9f6649f1 not found: ID does not exist" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.463660 4611 scope.go:117] "RemoveContainer" containerID="3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9" Sep 29 12:44:01 crc kubenswrapper[4611]: E0929 12:44:01.464119 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9\": container with ID starting with 3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9 not found: ID does not exist" containerID="3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.464168 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9"} err="failed to get container status \"3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9\": rpc error: code = NotFound desc = could not find container \"3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9\": container with ID starting with 3b878f21ece4484b3b6ad47e101429156df1fb6472dfd57f976d4dc49010bab9 not found: ID does not exist" Sep 29 12:44:01 crc kubenswrapper[4611]: I0929 12:44:01.744011 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" path="/var/lib/kubelet/pods/2a2e9132-98ce-49b0-967c-e3d31eee618a/volumes" Sep 29 12:44:03 crc kubenswrapper[4611]: I0929 12:44:03.722842 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nx6hn"] Sep 29 12:44:03 crc kubenswrapper[4611]: I0929 12:44:03.723463 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nx6hn" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="registry-server" containerID="cri-o://9184e8a57e484a3221d2b8d4c27da3e8d4c5008fad79d21d9bc5806a6df8958a" gracePeriod=2 Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.434026 4611 generic.go:334] "Generic (PLEG): container finished" podID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" 
containerID="9184e8a57e484a3221d2b8d4c27da3e8d4c5008fad79d21d9bc5806a6df8958a" exitCode=0 Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.434189 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerDied","Data":"9184e8a57e484a3221d2b8d4c27da3e8d4c5008fad79d21d9bc5806a6df8958a"} Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.628377 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.628430 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.628478 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.629036 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 12:44:04 crc kubenswrapper[4611]: I0929 12:44:04.629104 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d" gracePeriod=600 Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.352065 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qght9"] Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.440779 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d" exitCode=0 Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.440820 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d"} Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.648839 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nx6hn" Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.764828 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq4sb\" (UniqueName: \"kubernetes.io/projected/b1ddfd2d-adff-45e7-818f-4e1ddd410769-kube-api-access-dq4sb\") pod \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.764945 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-catalog-content\") pod \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.764982 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-utilities\") pod \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\" (UID: \"b1ddfd2d-adff-45e7-818f-4e1ddd410769\") " Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.765959 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-utilities" (OuterVolumeSpecName: "utilities") pod "b1ddfd2d-adff-45e7-818f-4e1ddd410769" (UID: "b1ddfd2d-adff-45e7-818f-4e1ddd410769"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.780170 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1ddfd2d-adff-45e7-818f-4e1ddd410769-kube-api-access-dq4sb" (OuterVolumeSpecName: "kube-api-access-dq4sb") pod "b1ddfd2d-adff-45e7-818f-4e1ddd410769" (UID: "b1ddfd2d-adff-45e7-818f-4e1ddd410769"). InnerVolumeSpecName "kube-api-access-dq4sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.848543 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1ddfd2d-adff-45e7-818f-4e1ddd410769" (UID: "b1ddfd2d-adff-45e7-818f-4e1ddd410769"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.866193 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.866442 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ddfd2d-adff-45e7-818f-4e1ddd410769-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:05 crc kubenswrapper[4611]: I0929 12:44:05.866458 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq4sb\" (UniqueName: \"kubernetes.io/projected/b1ddfd2d-adff-45e7-818f-4e1ddd410769-kube-api-access-dq4sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.094886 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v58w2" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.095026 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v58w2" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.150203 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v58w2" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.289440 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9djgx" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.289503 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9djgx" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.360049 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9djgx" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.447861 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"02c1cf1e6d77a414178e7c5de630b700218c10ae6c54f2823aa24b1d4995bcf3"} Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.450050 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nx6hn" event={"ID":"b1ddfd2d-adff-45e7-818f-4e1ddd410769","Type":"ContainerDied","Data":"dc9880898955f640700d4b3b5ef0b0ea0cc7d2d7fb2e96ae5281d0a1901ce00b"} Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.450083 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nx6hn" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.450091 4611 scope.go:117] "RemoveContainer" containerID="9184e8a57e484a3221d2b8d4c27da3e8d4c5008fad79d21d9bc5806a6df8958a" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.454447 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mwfrw" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.454483 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mwfrw" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.509690 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nx6hn"] Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.517408 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nx6hn"] Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.521833 4611 scope.go:117] "RemoveContainer" containerID="accca1d3dfb2400834587245f0902c4f02ed80d5c6cbad9ac13cedec7e6014c2" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.539915 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mwfrw" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.555245 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9djgx" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.560133 4611 scope.go:117] "RemoveContainer" containerID="fe9cb72a650acf93412ea06339afef7edef5cca42e88042e81ce56e7345ea850" Sep 29 12:44:06 crc kubenswrapper[4611]: I0929 12:44:06.567890 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v58w2" Sep 29 12:44:07 crc kubenswrapper[4611]: I0929 12:44:07.495562 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mwfrw" Sep 29 12:44:07 crc kubenswrapper[4611]: I0929 12:44:07.744809 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" path="/var/lib/kubelet/pods/b1ddfd2d-adff-45e7-818f-4e1ddd410769/volumes" Sep 29 12:44:09 crc kubenswrapper[4611]: I0929 12:44:09.222053 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mzmvg" Sep 29 12:44:09 crc kubenswrapper[4611]: I0929 12:44:09.222104 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mzmvg" Sep 29 12:44:09 crc kubenswrapper[4611]: I0929 12:44:09.266580 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mzmvg" Sep 29 12:44:09 crc kubenswrapper[4611]: I0929 12:44:09.502612 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mzmvg" Sep 29 12:44:09 crc kubenswrapper[4611]: I0929 12:44:09.921523 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwfrw"] Sep 29 12:44:09 crc kubenswrapper[4611]: I0929 12:44:09.921938 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mwfrw" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="registry-server" 
containerID="cri-o://4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6" gracePeriod=2 Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.120749 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9djgx"] Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.121013 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9djgx" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="registry-server" containerID="cri-o://fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b" gracePeriod=2 Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.253228 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwfrw" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.321891 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-utilities\") pod \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.321935 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-catalog-content\") pod \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.321959 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5smjw\" (UniqueName: \"kubernetes.io/projected/e26ad2ad-57ca-4097-ad5f-08162afc62b6-kube-api-access-5smjw\") pod \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\" (UID: \"e26ad2ad-57ca-4097-ad5f-08162afc62b6\") " Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.322797 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-utilities" (OuterVolumeSpecName: "utilities") pod "e26ad2ad-57ca-4097-ad5f-08162afc62b6" (UID: "e26ad2ad-57ca-4097-ad5f-08162afc62b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.344293 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e26ad2ad-57ca-4097-ad5f-08162afc62b6-kube-api-access-5smjw" (OuterVolumeSpecName: "kube-api-access-5smjw") pod "e26ad2ad-57ca-4097-ad5f-08162afc62b6" (UID: "e26ad2ad-57ca-4097-ad5f-08162afc62b6"). InnerVolumeSpecName "kube-api-access-5smjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.423567 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.423939 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5smjw\" (UniqueName: \"kubernetes.io/projected/e26ad2ad-57ca-4097-ad5f-08162afc62b6-kube-api-access-5smjw\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.459076 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9djgx" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.478804 4611 generic.go:334] "Generic (PLEG): container finished" podID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerID="4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6" exitCode=0 Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.478866 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwfrw" event={"ID":"e26ad2ad-57ca-4097-ad5f-08162afc62b6","Type":"ContainerDied","Data":"4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6"} Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.478888 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwfrw" event={"ID":"e26ad2ad-57ca-4097-ad5f-08162afc62b6","Type":"ContainerDied","Data":"59cd5bee2dbee659fa78c4a790f5b5cebde9f1f92c336c7293148732bf609bbc"} Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.478904 4611 scope.go:117] "RemoveContainer" containerID="4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.479017 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwfrw" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.483148 4611 generic.go:334] "Generic (PLEG): container finished" podID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerID="fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b" exitCode=0 Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.483269 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9djgx" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.483282 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9djgx" event={"ID":"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d","Type":"ContainerDied","Data":"fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b"} Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.483772 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9djgx" event={"ID":"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d","Type":"ContainerDied","Data":"314ac21b0525302f0dc47dc141cd169fff2fe181e838ed97d83194c4014a404b"} Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.495478 4611 scope.go:117] "RemoveContainer" containerID="dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.512952 4611 scope.go:117] "RemoveContainer" containerID="f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.523688 4611 scope.go:117] "RemoveContainer" containerID="4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6" Sep 29 12:44:10 crc kubenswrapper[4611]: E0929 12:44:10.524395 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6\": container with ID starting with 4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6 not found: ID does not exist" containerID="4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.524445 4611 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6"} err="failed to get container status \"4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6\": rpc error: code = NotFound desc = could not find container \"4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6\": container with ID starting with 4c79177a82d884747cb418085d51584bbead31f66c5ea066070cd5b05432a1b6 not found: ID does not exist" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.524469 4611 scope.go:117] "RemoveContainer" containerID="dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7" Sep 29 12:44:10 crc kubenswrapper[4611]: E0929 12:44:10.524907 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7\": container with ID starting with dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7 not found: ID does not exist" containerID="dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.524932 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7"} err="failed to get container status \"dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7\": rpc error: code = NotFound desc = could not find container \"dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7\": container with ID starting with dbd40a7bc0298c8e9cc1e891fc0ed5f1cee055fd5ff6334c08943d05a8946fb7 not found: ID does not exist" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.524952 4611 scope.go:117] "RemoveContainer" containerID="f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817" Sep 29 12:44:10 crc kubenswrapper[4611]: E0929 12:44:10.525203 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817\": container with ID starting with f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817 not found: ID does not exist" containerID="f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.525220 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817"} err="failed to get container status \"f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817\": rpc error: code = NotFound desc = could not find container \"f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817\": container with ID starting with f09f44d08191a0c6ba6b181068b5cb2ac189eb87b6264ae9d1e2b664d9593817 not found: ID does not exist" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.525242 4611 scope.go:117] "RemoveContainer" containerID="fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.536817 4611 scope.go:117] "RemoveContainer" containerID="e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.553083 4611 scope.go:117] "RemoveContainer" containerID="ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 
12:44:10.567538 4611 scope.go:117] "RemoveContainer" containerID="fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b" Sep 29 12:44:10 crc kubenswrapper[4611]: E0929 12:44:10.567981 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b\": container with ID starting with fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b not found: ID does not exist" containerID="fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.568012 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b"} err="failed to get container status \"fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b\": rpc error: code = NotFound desc = could not find container \"fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b\": container with ID starting with fc982dbd00e8ce0308674cbc8a4e1faa04f835feebb556d97226f960bab1f29b not found: ID does not exist" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.568037 4611 scope.go:117] "RemoveContainer" containerID="e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c" Sep 29 12:44:10 crc kubenswrapper[4611]: E0929 12:44:10.568377 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c\": container with ID starting with e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c not found: ID does not exist" containerID="e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.568506 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c"} err="failed to get container status \"e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c\": rpc error: code = NotFound desc = could not find container \"e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c\": container with ID starting with e51297bf2924ce40b52dd074818ecccfaaa1b2f633b2aa6837ff80da06e9ff7c not found: ID does not exist" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.568596 4611 scope.go:117] "RemoveContainer" containerID="ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd" Sep 29 12:44:10 crc kubenswrapper[4611]: E0929 12:44:10.570423 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd\": container with ID starting with ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd not found: ID does not exist" containerID="ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.570540 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd"} err="failed to get container status \"ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd\": rpc error: code = NotFound desc = could not find container \"ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd\": container with ID 
starting with ef8d67ca01175b863bcdcd897dd89b9b62c105e761b4c91f6c201855aa1602bd not found: ID does not exist" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.599500 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e26ad2ad-57ca-4097-ad5f-08162afc62b6" (UID: "e26ad2ad-57ca-4097-ad5f-08162afc62b6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.625105 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-catalog-content\") pod \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.625445 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgc45\" (UniqueName: \"kubernetes.io/projected/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-kube-api-access-rgc45\") pod \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.625531 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-utilities\") pod \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\" (UID: \"5c14a4b9-4eeb-4174-8343-5ba7e2a2234d\") " Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.625822 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e26ad2ad-57ca-4097-ad5f-08162afc62b6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.626475 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-utilities" (OuterVolumeSpecName: "utilities") pod "5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" (UID: "5c14a4b9-4eeb-4174-8343-5ba7e2a2234d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.629376 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-kube-api-access-rgc45" (OuterVolumeSpecName: "kube-api-access-rgc45") pod "5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" (UID: "5c14a4b9-4eeb-4174-8343-5ba7e2a2234d"). InnerVolumeSpecName "kube-api-access-rgc45". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.668330 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" (UID: "5c14a4b9-4eeb-4174-8343-5ba7e2a2234d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.726947 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.726987 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgc45\" (UniqueName: \"kubernetes.io/projected/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-kube-api-access-rgc45\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.726999 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.807835 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwfrw"] Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.814428 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mwfrw"] Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.821933 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9djgx"] Sep 29 12:44:10 crc kubenswrapper[4611]: I0929 12:44:10.825122 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9djgx"] Sep 29 12:44:11 crc kubenswrapper[4611]: I0929 12:44:11.742131 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" path="/var/lib/kubelet/pods/5c14a4b9-4eeb-4174-8343-5ba7e2a2234d/volumes" Sep 29 12:44:11 crc kubenswrapper[4611]: I0929 12:44:11.743025 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" path="/var/lib/kubelet/pods/e26ad2ad-57ca-4097-ad5f-08162afc62b6/volumes" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.384700 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerName="oauth-openshift" containerID="cri-o://e48c88a4491ff6bf7c65e125c335cb591afa74e74afaa8327481904e3746d569" gracePeriod=15 Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.594454 4611 generic.go:334] "Generic (PLEG): container finished" podID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerID="e48c88a4491ff6bf7c65e125c335cb591afa74e74afaa8327481904e3746d569" exitCode=0 Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.594496 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" event={"ID":"f5f3de88-84fb-42b9-953f-74f002a8af28","Type":"ContainerDied","Data":"e48c88a4491ff6bf7c65e125c335cb591afa74e74afaa8327481904e3746d569"} Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.706363 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.738725 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-649d76d5b4-nd8w4"] Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.738968 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.738979 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.738992 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.738998 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739007 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739013 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739023 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739029 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739038 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739043 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739054 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739060 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739067 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739073 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739079 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739084 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739092 4611 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739098 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739109 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94612e18-a9da-4ea2-bf41-bc79d340a6dc" containerName="pruner" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739116 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="94612e18-a9da-4ea2-bf41-bc79d340a6dc" containerName="pruner" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739124 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739130 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739136 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739141 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="extract-utilities" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739149 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerName="oauth-openshift" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739163 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerName="oauth-openshift" Sep 29 12:44:30 crc kubenswrapper[4611]: E0929 12:44:30.739174 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739181 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="extract-content" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739281 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" containerName="oauth-openshift" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739294 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a2e9132-98ce-49b0-967c-e3d31eee618a" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739306 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="94612e18-a9da-4ea2-bf41-bc79d340a6dc" containerName="pruner" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739313 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26ad2ad-57ca-4097-ad5f-08162afc62b6" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739322 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c14a4b9-4eeb-4174-8343-5ba7e2a2234d" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739329 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ddfd2d-adff-45e7-818f-4e1ddd410769" containerName="registry-server" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.739686 
4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.760814 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-649d76d5b4-nd8w4"] Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.877348 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-policies\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.877767 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-router-certs\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878009 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-trusted-ca-bundle\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878121 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-dir\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878223 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-serving-cert\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878331 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878435 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-login\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878548 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-service-ca\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878677 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878795 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm4ck\" (UniqueName: \"kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878906 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-provider-selection\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879016 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879220 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-session\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879495 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-idp-0-file-data\") pod \"f5f3de88-84fb-42b9-953f-74f002a8af28\" (UID: \"f5f3de88-84fb-42b9-953f-74f002a8af28\") " Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879788 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-service-ca\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879832 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-session\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879857 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc 
kubenswrapper[4611]: I0929 12:44:30.879883 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879921 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-login\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879946 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-serving-cert\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879967 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-router-certs\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879988 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.880018 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-cliconfig\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.880047 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-error\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.880071 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.880117 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-audit-policies\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.880150 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d3296247-3de9-4b48-927f-ff6ea1506e67-audit-dir\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.880221 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdjlg\" (UniqueName: \"kubernetes.io/projected/d3296247-3de9-4b48-927f-ff6ea1506e67-kube-api-access-sdjlg\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878459 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878499 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.878801 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879159 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.879302 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.883646 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.884422 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.885666 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.888060 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck" (OuterVolumeSpecName: "kube-api-access-cm4ck") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "kube-api-access-cm4ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.892007 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.895002 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.895917 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.896159 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.896447 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f5f3de88-84fb-42b9-953f-74f002a8af28" (UID: "f5f3de88-84fb-42b9-953f-74f002a8af28"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.981534 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-service-ca\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.981854 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-session\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982026 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982145 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982294 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-login\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982410 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-service-ca\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982508 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-serving-cert\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982611 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-router-certs\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982738 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982856 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-cliconfig\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.982984 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-error\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983104 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983225 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-audit-policies\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983357 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d3296247-3de9-4b48-927f-ff6ea1506e67-audit-dir\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983503 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdjlg\" (UniqueName: \"kubernetes.io/projected/d3296247-3de9-4b48-927f-ff6ea1506e67-kube-api-access-sdjlg\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983710 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983794 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-cliconfig\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983809 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983869 4611 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983886 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983902 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983916 4611 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5f3de88-84fb-42b9-953f-74f002a8af28-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983931 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc 
kubenswrapper[4611]: I0929 12:44:30.983945 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983961 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983977 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983993 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.984007 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm4ck\" (UniqueName: \"kubernetes.io/projected/f5f3de88-84fb-42b9-953f-74f002a8af28-kube-api-access-cm4ck\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.984020 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.984038 4611 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5f3de88-84fb-42b9-953f-74f002a8af28-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.983230 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.984948 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d3296247-3de9-4b48-927f-ff6ea1506e67-audit-policies\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.985668 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d3296247-3de9-4b48-927f-ff6ea1506e67-audit-dir\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.987748 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-session\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.987898 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-router-certs\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.987970 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-serving-cert\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.987955 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.990530 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-error\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.991346 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.992156 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-template-login\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:30 crc kubenswrapper[4611]: I0929 12:44:30.999330 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d3296247-3de9-4b48-927f-ff6ea1506e67-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.000273 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdjlg\" (UniqueName: 
\"kubernetes.io/projected/d3296247-3de9-4b48-927f-ff6ea1506e67-kube-api-access-sdjlg\") pod \"oauth-openshift-649d76d5b4-nd8w4\" (UID: \"d3296247-3de9-4b48-927f-ff6ea1506e67\") " pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.070521 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.260449 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-649d76d5b4-nd8w4"] Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.601920 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" event={"ID":"d3296247-3de9-4b48-927f-ff6ea1506e67","Type":"ContainerStarted","Data":"53b931620e5369c9398a952a711a01d43c1d7ea083e9616aba5cdc9fccab014b"} Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.602255 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" event={"ID":"d3296247-3de9-4b48-927f-ff6ea1506e67","Type":"ContainerStarted","Data":"cf197f2341695e5a6533718db5075b768f98ea58bd9a3dde529ce8ee5ce309a8"} Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.602280 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.606120 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" event={"ID":"f5f3de88-84fb-42b9-953f-74f002a8af28","Type":"ContainerDied","Data":"c77410c5869733d44abfc79f9f01ac0b23d948c3fe47ee9683d6701f81b704a6"} Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.606157 4611 scope.go:117] "RemoveContainer" containerID="e48c88a4491ff6bf7c65e125c335cb591afa74e74afaa8327481904e3746d569" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.606260 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qght9" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.653666 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" podStartSLOduration=26.653646713 podStartE2EDuration="26.653646713s" podCreationTimestamp="2025-09-29 12:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:44:31.6509866 +0000 UTC m=+258.542506226" watchObservedRunningTime="2025-09-29 12:44:31.653646713 +0000 UTC m=+258.545166319" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.665580 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qght9"] Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.668593 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qght9"] Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.742541 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5f3de88-84fb-42b9-953f-74f002a8af28" path="/var/lib/kubelet/pods/f5f3de88-84fb-42b9-953f-74f002a8af28/volumes" Sep 29 12:44:31 crc kubenswrapper[4611]: I0929 12:44:31.921049 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-649d76d5b4-nd8w4" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.094246 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rxkx4"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.095266 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rxkx4" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="registry-server" containerID="cri-o://34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc" gracePeriod=30 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.127146 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v58w2"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.137295 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v58w2" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="registry-server" containerID="cri-o://a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382" gracePeriod=30 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.145151 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rcs29"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.145850 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" containerName="marketplace-operator" containerID="cri-o://6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7" gracePeriod=30 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.154302 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd7t4"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.154594 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kd7t4" 
podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="registry-server" containerID="cri-o://ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31" gracePeriod=30 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.166009 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mzmvg"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.166341 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mzmvg" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="registry-server" containerID="cri-o://067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6" gracePeriod=30 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.167886 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7s6xd"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.168536 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.173880 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7s6xd"] Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.341212 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjcnn\" (UniqueName: \"kubernetes.io/projected/471e6196-70f6-4f58-b544-aec3c50ec4b7-kube-api-access-zjcnn\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.341333 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/471e6196-70f6-4f58-b544-aec3c50ec4b7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.341381 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/471e6196-70f6-4f58-b544-aec3c50ec4b7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.445331 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjcnn\" (UniqueName: \"kubernetes.io/projected/471e6196-70f6-4f58-b544-aec3c50ec4b7-kube-api-access-zjcnn\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.445450 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/471e6196-70f6-4f58-b544-aec3c50ec4b7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc 
kubenswrapper[4611]: I0929 12:44:44.445491 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/471e6196-70f6-4f58-b544-aec3c50ec4b7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.447594 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/471e6196-70f6-4f58-b544-aec3c50ec4b7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.452708 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/471e6196-70f6-4f58-b544-aec3c50ec4b7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.466494 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjcnn\" (UniqueName: \"kubernetes.io/projected/471e6196-70f6-4f58-b544-aec3c50ec4b7-kube-api-access-zjcnn\") pod \"marketplace-operator-79b997595-7s6xd\" (UID: \"471e6196-70f6-4f58-b544-aec3c50ec4b7\") " pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.549688 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxkx4" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.568036 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v58w2" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.651511 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-catalog-content\") pod \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.651615 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-utilities\") pod \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.651697 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6s26\" (UniqueName: \"kubernetes.io/projected/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-kube-api-access-x6s26\") pod \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\" (UID: \"264d8c44-48fe-46fa-a3c9-d0df2dc4837a\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.653283 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-utilities" (OuterVolumeSpecName: "utilities") pod "264d8c44-48fe-46fa-a3c9-d0df2dc4837a" (UID: "264d8c44-48fe-46fa-a3c9-d0df2dc4837a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.658017 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mzmvg" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.660535 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-kube-api-access-x6s26" (OuterVolumeSpecName: "kube-api-access-x6s26") pod "264d8c44-48fe-46fa-a3c9-d0df2dc4837a" (UID: "264d8c44-48fe-46fa-a3c9-d0df2dc4837a"). InnerVolumeSpecName "kube-api-access-x6s26". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.674721 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.698469 4611 generic.go:334] "Generic (PLEG): container finished" podID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerID="067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6" exitCode=0 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.698575 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerDied","Data":"067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6"} Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.698737 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mzmvg" event={"ID":"2b22af10-abdb-4a44-bbb1-aa53980a366a","Type":"ContainerDied","Data":"b58b912e0ed4aa11fd664a2e86a0d3cbda16a2d06c78858be3157d15b4fd4d55"} Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.698774 4611 scope.go:117] "RemoveContainer" containerID="067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.699050 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mzmvg" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.708599 4611 generic.go:334] "Generic (PLEG): container finished" podID="00b46678-03aa-4e70-af99-c622eb2b2508" containerID="6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7" exitCode=0 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.708895 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" event={"ID":"00b46678-03aa-4e70-af99-c622eb2b2508","Type":"ContainerDied","Data":"6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7"} Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.708946 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" event={"ID":"00b46678-03aa-4e70-af99-c622eb2b2508","Type":"ContainerDied","Data":"76dc0f0c553516333867ee66bab427164fd244cce4d3b37a78e4fc7347e9a206"} Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.708867 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rcs29" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.709856 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd7t4" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.712318 4611 generic.go:334] "Generic (PLEG): container finished" podID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerID="a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382" exitCode=0 Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.712469 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v58w2" event={"ID":"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e","Type":"ContainerDied","Data":"a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382"} Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.712672 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v58w2" event={"ID":"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e","Type":"ContainerDied","Data":"27f6980f28e37db79778cec7d131725aa6f3572f72eabd1588e6e6d99548d397"} Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.712870 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v58w2" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.730852 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.733712 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "264d8c44-48fe-46fa-a3c9-d0df2dc4837a" (UID: "264d8c44-48fe-46fa-a3c9-d0df2dc4837a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.736906 4611 scope.go:117] "RemoveContainer" containerID="dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.752761 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-catalog-content\") pod \"2b22af10-abdb-4a44-bbb1-aa53980a366a\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.753921 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ttkw\" (UniqueName: \"kubernetes.io/projected/2b22af10-abdb-4a44-bbb1-aa53980a366a-kube-api-access-9ttkw\") pod \"2b22af10-abdb-4a44-bbb1-aa53980a366a\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.754272 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-utilities\") pod \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.754460 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-catalog-content\") pod \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.754865 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-dsdtp\" (UniqueName: \"kubernetes.io/projected/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-kube-api-access-dsdtp\") pod \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\" (UID: \"50f7035c-f8b1-40ec-b2e9-fc3f470eec0e\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.756391 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-utilities\") pod \"2b22af10-abdb-4a44-bbb1-aa53980a366a\" (UID: \"2b22af10-abdb-4a44-bbb1-aa53980a366a\") " Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.758162 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6s26\" (UniqueName: \"kubernetes.io/projected/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-kube-api-access-x6s26\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.758295 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.758379 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/264d8c44-48fe-46fa-a3c9-d0df2dc4837a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.760127 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-utilities" (OuterVolumeSpecName: "utilities") pod "2b22af10-abdb-4a44-bbb1-aa53980a366a" (UID: "2b22af10-abdb-4a44-bbb1-aa53980a366a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.762946 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-utilities" (OuterVolumeSpecName: "utilities") pod "50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" (UID: "50f7035c-f8b1-40ec-b2e9-fc3f470eec0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.765025 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b22af10-abdb-4a44-bbb1-aa53980a366a-kube-api-access-9ttkw" (OuterVolumeSpecName: "kube-api-access-9ttkw") pod "2b22af10-abdb-4a44-bbb1-aa53980a366a" (UID: "2b22af10-abdb-4a44-bbb1-aa53980a366a"). InnerVolumeSpecName "kube-api-access-9ttkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.765121 4611 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.765506 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxkx4" event={"ID":"264d8c44-48fe-46fa-a3c9-d0df2dc4837a","Type":"ContainerDied","Data":"34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc"}
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.765028 4611 generic.go:334] "Generic (PLEG): container finished" podID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerID="34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc" exitCode=0
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.767075 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxkx4" event={"ID":"264d8c44-48fe-46fa-a3c9-d0df2dc4837a","Type":"ContainerDied","Data":"8d9ec578036bb871b345b764f93677bdef2551cd2d15a95499f1c5b6c289a84f"}
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.778850 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-kube-api-access-dsdtp" (OuterVolumeSpecName: "kube-api-access-dsdtp") pod "50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" (UID: "50f7035c-f8b1-40ec-b2e9-fc3f470eec0e"). InnerVolumeSpecName "kube-api-access-dsdtp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.788143 4611 scope.go:117] "RemoveContainer" containerID="28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.797173 4611 generic.go:334] "Generic (PLEG): container finished" podID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerID="ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31" exitCode=0
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.797328 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd7t4" event={"ID":"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0","Type":"ContainerDied","Data":"ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31"}
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.797457 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd7t4" event={"ID":"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0","Type":"ContainerDied","Data":"1401d83e703968448184f967e2f025b8414b71189dfd0f91aee0ae9de161759e"}
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.797725 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd7t4"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.833200 4611 scope.go:117] "RemoveContainer" containerID="067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6"
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.835261 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6\": container with ID starting with 067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6 not found: ID does not exist" containerID="067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.835305 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6"} err="failed to get container status \"067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6\": rpc error: code = NotFound desc = could not find container \"067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6\": container with ID starting with 067268d1e1f7a70bfef061ad5ebffb042251a11cee3462ffdcbf2ced6921b7c6 not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.835334 4611 scope.go:117] "RemoveContainer" containerID="dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396"
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.835808 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396\": container with ID starting with dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396 not found: ID does not exist" containerID="dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.835838 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396"} err="failed to get container status \"dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396\": rpc error: code = NotFound desc = could not find container \"dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396\": container with ID starting with dfb6ff635566656903a9d57e04868ea56e395bd25ea0390d1153621f91313396 not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.835853 4611 scope.go:117] "RemoveContainer" containerID="28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580"
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.836400 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580\": container with ID starting with 28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580 not found: ID does not exist" containerID="28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.836428 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580"} err="failed to get container status \"28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580\": rpc error: code = NotFound desc = could not find container \"28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580\": container with ID starting with 28318e0370f7e00349add455dbd36349a70cb32a18e7eef01e19dcf7381fc580 not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.836441 4611 scope.go:117] "RemoveContainer" containerID="6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.842141 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" (UID: "50f7035c-f8b1-40ec-b2e9-fc3f470eec0e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.858936 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-trusted-ca\") pod \"00b46678-03aa-4e70-af99-c622eb2b2508\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") "
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859017 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjtfk\" (UniqueName: \"kubernetes.io/projected/00b46678-03aa-4e70-af99-c622eb2b2508-kube-api-access-jjtfk\") pod \"00b46678-03aa-4e70-af99-c622eb2b2508\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") "
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859044 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-operator-metrics\") pod \"00b46678-03aa-4e70-af99-c622eb2b2508\" (UID: \"00b46678-03aa-4e70-af99-c622eb2b2508\") "
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859102 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-utilities\") pod \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") "
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859140 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-catalog-content\") pod \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") "
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859161 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8ck7\" (UniqueName: \"kubernetes.io/projected/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-kube-api-access-t8ck7\") pod \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\" (UID: \"d4fbaa17-e5ab-45cd-ba85-23cc115b07c0\") "
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859410 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ttkw\" (UniqueName: \"kubernetes.io/projected/2b22af10-abdb-4a44-bbb1-aa53980a366a-kube-api-access-9ttkw\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859421 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859430 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859438 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsdtp\" (UniqueName: \"kubernetes.io/projected/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e-kube-api-access-dsdtp\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.859446 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.860249 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "00b46678-03aa-4e70-af99-c622eb2b2508" (UID: "00b46678-03aa-4e70-af99-c622eb2b2508"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.862571 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-utilities" (OuterVolumeSpecName: "utilities") pod "d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" (UID: "d4fbaa17-e5ab-45cd-ba85-23cc115b07c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.865462 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "00b46678-03aa-4e70-af99-c622eb2b2508" (UID: "00b46678-03aa-4e70-af99-c622eb2b2508"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.866223 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rxkx4"]
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.868971 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00b46678-03aa-4e70-af99-c622eb2b2508-kube-api-access-jjtfk" (OuterVolumeSpecName: "kube-api-access-jjtfk") pod "00b46678-03aa-4e70-af99-c622eb2b2508" (UID: "00b46678-03aa-4e70-af99-c622eb2b2508"). InnerVolumeSpecName "kube-api-access-jjtfk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.869571 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rxkx4"]
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.870241 4611 scope.go:117] "RemoveContainer" containerID="6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.872912 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-kube-api-access-t8ck7" (OuterVolumeSpecName: "kube-api-access-t8ck7") pod "d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" (UID: "d4fbaa17-e5ab-45cd-ba85-23cc115b07c0"). InnerVolumeSpecName "kube-api-access-t8ck7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.876487 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7\": container with ID starting with 6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7 not found: ID does not exist" containerID="6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.876568 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7"} err="failed to get container status \"6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7\": rpc error: code = NotFound desc = could not find container \"6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7\": container with ID starting with 6b896c82ad3c727689cdcc901e4bfb1f16349f8b3d7d644e04b4241ccfbe03d7 not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.876607 4611 scope.go:117] "RemoveContainer" containerID="a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.878313 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" (UID: "d4fbaa17-e5ab-45cd-ba85-23cc115b07c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.879198 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b22af10-abdb-4a44-bbb1-aa53980a366a" (UID: "2b22af10-abdb-4a44-bbb1-aa53980a366a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.904137 4611 scope.go:117] "RemoveContainer" containerID="1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.931516 4611 scope.go:117] "RemoveContainer" containerID="3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.954643 4611 scope.go:117] "RemoveContainer" containerID="a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382"
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.955195 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382\": container with ID starting with a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382 not found: ID does not exist" containerID="a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.955228 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382"} err="failed to get container status \"a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382\": rpc error: code = NotFound desc = could not find container \"a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382\": container with ID starting with a24f9dc4c2b5be4b2d2dee35fb20de83f42663fb46e166ae773d731300179382 not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.955281 4611 scope.go:117] "RemoveContainer" containerID="1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e"
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.955617 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e\": container with ID starting with 1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e not found: ID does not exist" containerID="1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.955726 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e"} err="failed to get container status \"1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e\": rpc error: code = NotFound desc = could not find container \"1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e\": container with ID starting with 1836f9a3fefbf613914e21e23a106e5fb9cb8e18d0393aeeab7e5c0a7b33a75e not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.955747 4611 scope.go:117] "RemoveContainer" containerID="3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069"
Sep 29 12:44:44 crc kubenswrapper[4611]: E0929 12:44:44.956058 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069\": container with ID starting with 3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069 not found: ID does not exist" containerID="3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.956080 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069"} err="failed to get container status \"3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069\": rpc error: code = NotFound desc = could not find container \"3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069\": container with ID starting with 3aa9cdade686b20277a3a4a72a03ee28e838b5f6fd3ebd1dcf39eba0bc131069 not found: ID does not exist"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.956098 4611 scope.go:117] "RemoveContainer" containerID="34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc"
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.961675 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b22af10-abdb-4a44-bbb1-aa53980a366a-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.961736 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.961749 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.962669 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8ck7\" (UniqueName: \"kubernetes.io/projected/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0-kube-api-access-t8ck7\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.962694 4611 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.962827 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjtfk\" (UniqueName: \"kubernetes.io/projected/00b46678-03aa-4e70-af99-c622eb2b2508-kube-api-access-jjtfk\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.962842 4611 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00b46678-03aa-4e70-af99-c622eb2b2508-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Sep 29 12:44:44 crc kubenswrapper[4611]: I0929 12:44:44.995490 4611 scope.go:117] "RemoveContainer" containerID="c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.066808 4611 scope.go:117] "RemoveContainer" containerID="ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.077281 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rcs29"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.091093 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rcs29"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.112441 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mzmvg"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.122578 4611 scope.go:117] "RemoveContainer" containerID="34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.126846 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mzmvg"]
Sep 29 12:44:45 crc kubenswrapper[4611]: E0929 12:44:45.127506 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc\": container with ID starting with 34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc not found: ID does not exist" containerID="34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.127689 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc"} err="failed to get container status \"34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc\": rpc error: code = NotFound desc = could not find container \"34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc\": container with ID starting with 34414d63fbd63a37ba29a5e2f36f9624e5ded46d9bb3ccf03a2acda16dbb55dc not found: ID does not exist"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.127816 4611 scope.go:117] "RemoveContainer" containerID="c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362"
Sep 29 12:44:45 crc kubenswrapper[4611]: E0929 12:44:45.129331 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362\": container with ID starting with c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362 not found: ID does not exist" containerID="c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.129473 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362"} err="failed to get container status \"c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362\": rpc error: code = NotFound desc = could not find container \"c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362\": container with ID starting with c2e8ef6de15f16f1ddc74e6a9097896f591a91a13c6bb06d92b5acbec727f362 not found: ID does not exist"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.129584 4611 scope.go:117] "RemoveContainer" containerID="ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e"
Sep 29 12:44:45 crc kubenswrapper[4611]: E0929 12:44:45.138152 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e\": container with ID starting with ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e not found: ID does not exist" containerID="ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.138438 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e"} err="failed to get container status \"ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e\": rpc error: code = NotFound desc = could not find container \"ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e\": container with ID starting with ae34180a65187afa1b72d81763d539268e087eabdc5bfda162a807d1e9d5f93e not found: ID does not exist"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.138595 4611 scope.go:117] "RemoveContainer" containerID="ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.152295 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v58w2"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.167922 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v58w2"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.178097 4611 scope.go:117] "RemoveContainer" containerID="e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.188681 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd7t4"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.189010 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd7t4"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.271261 4611 scope.go:117] "RemoveContainer" containerID="9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.284209 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7s6xd"]
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.288248 4611 scope.go:117] "RemoveContainer" containerID="ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31"
Sep 29 12:44:45 crc kubenswrapper[4611]: E0929 12:44:45.288640 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31\": container with ID starting with ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31 not found: ID does not exist" containerID="ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.288671 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31"} err="failed to get container status \"ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31\": rpc error: code = NotFound desc = could not find container \"ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31\": container with ID starting with ee2de6bc094b117fce9cb208abcdd9eac879d7aea66f105423799fc451286c31 not found: ID does not exist"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.288690 4611 scope.go:117] "RemoveContainer" containerID="e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb"
Sep 29 12:44:45 crc kubenswrapper[4611]: W0929 12:44:45.292788 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod471e6196_70f6_4f58_b544_aec3c50ec4b7.slice/crio-c258adfc912f72d1131342bea76803687c439f21fc05c9b275ee816eb85cd307 WatchSource:0}: Error finding container c258adfc912f72d1131342bea76803687c439f21fc05c9b275ee816eb85cd307: Status 404 returned error can't find the container with id c258adfc912f72d1131342bea76803687c439f21fc05c9b275ee816eb85cd307
Sep 29 12:44:45 crc kubenswrapper[4611]: E0929 12:44:45.292849 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb\": container with ID starting with e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb not found: ID does not exist" containerID="e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.292885 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb"} err="failed to get container status \"e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb\": rpc error: code = NotFound desc = could not find container \"e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb\": container with ID starting with e685e3aa12a25cfdaa4527dca1647370ad3021b7520bfc77b8e0cbba1c6fbbeb not found: ID does not exist"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.292909 4611 scope.go:117] "RemoveContainer" containerID="9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828"
Sep 29 12:44:45 crc kubenswrapper[4611]: E0929 12:44:45.293432 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828\": container with ID starting with 9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828 not found: ID does not exist" containerID="9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.293461 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828"} err="failed to get container status \"9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828\": rpc error: code = NotFound desc = could not find container \"9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828\": container with ID starting with 9f0436ce05a4d7bd642b5083bf4723c3221fe266b497519bd3e9e529c3194828 not found: ID does not exist"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.742506 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" path="/var/lib/kubelet/pods/00b46678-03aa-4e70-af99-c622eb2b2508/volumes"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.743359 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" path="/var/lib/kubelet/pods/264d8c44-48fe-46fa-a3c9-d0df2dc4837a/volumes"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.744002 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" path="/var/lib/kubelet/pods/2b22af10-abdb-4a44-bbb1-aa53980a366a/volumes"
Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.745098 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" path="/var/lib/kubelet/pods/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e/volumes"
"Cleaned up orphaned pod volumes dir" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" path="/var/lib/kubelet/pods/50f7035c-f8b1-40ec-b2e9-fc3f470eec0e/volumes" Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.745728 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" path="/var/lib/kubelet/pods/d4fbaa17-e5ab-45cd-ba85-23cc115b07c0/volumes" Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.804598 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" event={"ID":"471e6196-70f6-4f58-b544-aec3c50ec4b7","Type":"ContainerStarted","Data":"d6f6e74ae7b9d9c5c14a592c561d8354e611a1a4056420e94ba199a652df7594"} Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.804938 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" event={"ID":"471e6196-70f6-4f58-b544-aec3c50ec4b7","Type":"ContainerStarted","Data":"c258adfc912f72d1131342bea76803687c439f21fc05c9b275ee816eb85cd307"} Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.805412 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.807421 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" Sep 29 12:44:45 crc kubenswrapper[4611]: I0929 12:44:45.832473 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7s6xd" podStartSLOduration=1.832458922 podStartE2EDuration="1.832458922s" podCreationTimestamp="2025-09-29 12:44:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:44:45.831823384 +0000 UTC m=+272.723342990" watchObservedRunningTime="2025-09-29 12:44:45.832458922 +0000 UTC m=+272.723978528" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113309 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fghqn"] Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113745 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113757 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113769 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113774 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113782 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113787 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113794 4611 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113799 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113810 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113816 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113823 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113828 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113844 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113849 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113856 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113861 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113869 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" containerName="marketplace-operator" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113875 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" containerName="marketplace-operator" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113884 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113889 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113896 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113901 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113910 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113916 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="extract-content" Sep 29 12:44:46 crc kubenswrapper[4611]: E0929 12:44:46.113924 4611 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.113929 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="extract-utilities" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.114007 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fbaa17-e5ab-45cd-ba85-23cc115b07c0" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.114019 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="00b46678-03aa-4e70-af99-c622eb2b2508" containerName="marketplace-operator" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.114027 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b22af10-abdb-4a44-bbb1-aa53980a366a" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.114035 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="264d8c44-48fe-46fa-a3c9-d0df2dc4837a" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.114042 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="50f7035c-f8b1-40ec-b2e9-fc3f470eec0e" containerName="registry-server" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.114657 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.118980 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.135146 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fghqn"] Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.189826 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96lp6\" (UniqueName: \"kubernetes.io/projected/7f715f99-dbbf-4748-92eb-cd643708ff81-kube-api-access-96lp6\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.189869 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-catalog-content\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.189885 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-utilities\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.291214 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96lp6\" (UniqueName: \"kubernetes.io/projected/7f715f99-dbbf-4748-92eb-cd643708ff81-kube-api-access-96lp6\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " 
pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.291264 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-catalog-content\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.291280 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-utilities\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.291740 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-utilities\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.292131 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-catalog-content\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.313721 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96lp6\" (UniqueName: \"kubernetes.io/projected/7f715f99-dbbf-4748-92eb-cd643708ff81-kube-api-access-96lp6\") pod \"certified-operators-fghqn\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.446927 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.663759 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fghqn"] Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.713518 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l98cw"] Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.714452 4611 util.go:30] "No sandbox for pod can be found. 
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.716017 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.722109 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l98cw"]
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.802317 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77747050-735f-46d8-a725-dfc31764b0e7-utilities\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.802388 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77747050-735f-46d8-a725-dfc31764b0e7-catalog-content\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.802430 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vftk\" (UniqueName: \"kubernetes.io/projected/77747050-735f-46d8-a725-dfc31764b0e7-kube-api-access-5vftk\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.820317 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fghqn" event={"ID":"7f715f99-dbbf-4748-92eb-cd643708ff81","Type":"ContainerStarted","Data":"0de4406842d10d90c2bbe3688c96cea16ce11824a3b0acd97879566b67a74574"}
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.904005 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77747050-735f-46d8-a725-dfc31764b0e7-utilities\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.904596 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77747050-735f-46d8-a725-dfc31764b0e7-catalog-content\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.904729 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vftk\" (UniqueName: \"kubernetes.io/projected/77747050-735f-46d8-a725-dfc31764b0e7-kube-api-access-5vftk\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.905917 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77747050-735f-46d8-a725-dfc31764b0e7-catalog-content\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.906301 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77747050-735f-46d8-a725-dfc31764b0e7-utilities\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:46 crc kubenswrapper[4611]: I0929 12:44:46.930117 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vftk\" (UniqueName: \"kubernetes.io/projected/77747050-735f-46d8-a725-dfc31764b0e7-kube-api-access-5vftk\") pod \"redhat-marketplace-l98cw\" (UID: \"77747050-735f-46d8-a725-dfc31764b0e7\") " pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.056531 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l98cw"
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.281857 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l98cw"]
Sep 29 12:44:47 crc kubenswrapper[4611]: W0929 12:44:47.290823 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77747050_735f_46d8_a725_dfc31764b0e7.slice/crio-859eee82da3f5721bde7ebd1fa3f5f36d71b0b3b4c47b8c6eaa622a1d3eb5565 WatchSource:0}: Error finding container 859eee82da3f5721bde7ebd1fa3f5f36d71b0b3b4c47b8c6eaa622a1d3eb5565: Status 404 returned error can't find the container with id 859eee82da3f5721bde7ebd1fa3f5f36d71b0b3b4c47b8c6eaa622a1d3eb5565
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.833398 4611 generic.go:334] "Generic (PLEG): container finished" podID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerID="5a08e73e4f739cd68950613e851ef98739bd3d8feabbb298780e54ca438d440a" exitCode=0
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.833884 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fghqn" event={"ID":"7f715f99-dbbf-4748-92eb-cd643708ff81","Type":"ContainerDied","Data":"5a08e73e4f739cd68950613e851ef98739bd3d8feabbb298780e54ca438d440a"}
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.836591 4611 generic.go:334] "Generic (PLEG): container finished" podID="77747050-735f-46d8-a725-dfc31764b0e7" containerID="2a032de7ac8417db422ae121ee19fc46d72cbe2bc2b57b4b7d24360c47ec81bf" exitCode=0
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.836716 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l98cw" event={"ID":"77747050-735f-46d8-a725-dfc31764b0e7","Type":"ContainerDied","Data":"2a032de7ac8417db422ae121ee19fc46d72cbe2bc2b57b4b7d24360c47ec81bf"}
Sep 29 12:44:47 crc kubenswrapper[4611]: I0929 12:44:47.836787 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l98cw" event={"ID":"77747050-735f-46d8-a725-dfc31764b0e7","Type":"ContainerStarted","Data":"859eee82da3f5721bde7ebd1fa3f5f36d71b0b3b4c47b8c6eaa622a1d3eb5565"}
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.511305 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c6d4c"]
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.512534 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.515229 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.523744 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c6d4c"]
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.531538 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glcws\" (UniqueName: \"kubernetes.io/projected/3413e896-e275-4fab-81c4-c03efb31fcc6-kube-api-access-glcws\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.531586 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3413e896-e275-4fab-81c4-c03efb31fcc6-utilities\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.531681 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3413e896-e275-4fab-81c4-c03efb31fcc6-catalog-content\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.632999 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3413e896-e275-4fab-81c4-c03efb31fcc6-catalog-content\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.633314 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glcws\" (UniqueName: \"kubernetes.io/projected/3413e896-e275-4fab-81c4-c03efb31fcc6-kube-api-access-glcws\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.633342 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3413e896-e275-4fab-81c4-c03efb31fcc6-utilities\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.633831 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3413e896-e275-4fab-81c4-c03efb31fcc6-utilities\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.633995 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3413e896-e275-4fab-81c4-c03efb31fcc6-catalog-content\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.658670 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glcws\" (UniqueName: \"kubernetes.io/projected/3413e896-e275-4fab-81c4-c03efb31fcc6-kube-api-access-glcws\") pod \"redhat-operators-c6d4c\" (UID: \"3413e896-e275-4fab-81c4-c03efb31fcc6\") " pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.844484 4611 generic.go:334] "Generic (PLEG): container finished" podID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerID="1754eb1ac9d87a1211feef8668f275a7cb6b889c432b76748884cc6ba1c12556" exitCode=0
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.844565 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fghqn" event={"ID":"7f715f99-dbbf-4748-92eb-cd643708ff81","Type":"ContainerDied","Data":"1754eb1ac9d87a1211feef8668f275a7cb6b889c432b76748884cc6ba1c12556"}
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.848454 4611 generic.go:334] "Generic (PLEG): container finished" podID="77747050-735f-46d8-a725-dfc31764b0e7" containerID="e533a6bcfb10aa1b1f022901763884157c1004cad47541e9abd6dbaea25196e7" exitCode=0
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.848532 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l98cw" event={"ID":"77747050-735f-46d8-a725-dfc31764b0e7","Type":"ContainerDied","Data":"e533a6bcfb10aa1b1f022901763884157c1004cad47541e9abd6dbaea25196e7"}
Sep 29 12:44:48 crc kubenswrapper[4611]: I0929 12:44:48.873702 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c6d4c"
Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.061601 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c6d4c"]
Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.110291 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kfmlj"]
Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.111471 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kfmlj"
Need to start a new one" pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.114691 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.119714 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kfmlj"] Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.140532 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cdsl\" (UniqueName: \"kubernetes.io/projected/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-kube-api-access-4cdsl\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.140610 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-catalog-content\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.140794 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-utilities\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.242504 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-utilities\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.242574 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cdsl\" (UniqueName: \"kubernetes.io/projected/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-kube-api-access-4cdsl\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.242601 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-catalog-content\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.243126 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-utilities\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.243183 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-catalog-content\") pod \"community-operators-kfmlj\" (UID: 
\"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.263578 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cdsl\" (UniqueName: \"kubernetes.io/projected/1dad5bc8-c0f2-437f-82f1-d516c6738eeb-kube-api-access-4cdsl\") pod \"community-operators-kfmlj\" (UID: \"1dad5bc8-c0f2-437f-82f1-d516c6738eeb\") " pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.509464 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.860498 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fghqn" event={"ID":"7f715f99-dbbf-4748-92eb-cd643708ff81","Type":"ContainerStarted","Data":"51ab4924306d5123b3e833fb4afa6147b266cc9926266f4bbd239122ff02ac26"} Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.865740 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l98cw" event={"ID":"77747050-735f-46d8-a725-dfc31764b0e7","Type":"ContainerStarted","Data":"6438a14c07626b10e95cb6ccc000b1ddb2256107edcb1d35af58bbf3ff5d66be"} Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.867096 4611 generic.go:334] "Generic (PLEG): container finished" podID="3413e896-e275-4fab-81c4-c03efb31fcc6" containerID="e96921d278d756838fa8bd7a90d3b622cb103cfe1ae3556113614a2b95625654" exitCode=0 Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.867131 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6d4c" event={"ID":"3413e896-e275-4fab-81c4-c03efb31fcc6","Type":"ContainerDied","Data":"e96921d278d756838fa8bd7a90d3b622cb103cfe1ae3556113614a2b95625654"} Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.867152 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6d4c" event={"ID":"3413e896-e275-4fab-81c4-c03efb31fcc6","Type":"ContainerStarted","Data":"ea9511943ab8fc9eeabecd9fde3181a4bb2c5271aec6005de75f21587c0f13b2"} Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.885346 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fghqn" podStartSLOduration=2.401540517 podStartE2EDuration="3.885324887s" podCreationTimestamp="2025-09-29 12:44:46 +0000 UTC" firstStartedPulling="2025-09-29 12:44:47.836209356 +0000 UTC m=+274.727728962" lastFinishedPulling="2025-09-29 12:44:49.319993726 +0000 UTC m=+276.211513332" observedRunningTime="2025-09-29 12:44:49.883535996 +0000 UTC m=+276.775055602" watchObservedRunningTime="2025-09-29 12:44:49.885324887 +0000 UTC m=+276.776844493" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 12:44:49.908288 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l98cw" podStartSLOduration=2.467838668 podStartE2EDuration="3.908267438s" podCreationTimestamp="2025-09-29 12:44:46 +0000 UTC" firstStartedPulling="2025-09-29 12:44:47.840889149 +0000 UTC m=+274.732408915" lastFinishedPulling="2025-09-29 12:44:49.281318089 +0000 UTC m=+276.172837685" observedRunningTime="2025-09-29 12:44:49.906670982 +0000 UTC m=+276.798190598" watchObservedRunningTime="2025-09-29 12:44:49.908267438 +0000 UTC m=+276.799787044" Sep 29 12:44:49 crc kubenswrapper[4611]: I0929 
12:44:49.959009 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kfmlj"] Sep 29 12:44:50 crc kubenswrapper[4611]: I0929 12:44:50.873548 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6d4c" event={"ID":"3413e896-e275-4fab-81c4-c03efb31fcc6","Type":"ContainerStarted","Data":"eecef56ecc7281a2a2a7ee9371f5daa4870e1f03d8b959dd707516a3e9119d31"} Sep 29 12:44:50 crc kubenswrapper[4611]: I0929 12:44:50.876164 4611 generic.go:334] "Generic (PLEG): container finished" podID="1dad5bc8-c0f2-437f-82f1-d516c6738eeb" containerID="5d91815d7f5f39c829501e3ef5e93365ded23f624f03ec33066d069f02d25d23" exitCode=0 Sep 29 12:44:50 crc kubenswrapper[4611]: I0929 12:44:50.876266 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfmlj" event={"ID":"1dad5bc8-c0f2-437f-82f1-d516c6738eeb","Type":"ContainerDied","Data":"5d91815d7f5f39c829501e3ef5e93365ded23f624f03ec33066d069f02d25d23"} Sep 29 12:44:50 crc kubenswrapper[4611]: I0929 12:44:50.876313 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfmlj" event={"ID":"1dad5bc8-c0f2-437f-82f1-d516c6738eeb","Type":"ContainerStarted","Data":"98989cca984c2093997c1b690c88012ee4bbc9e5ff29408fc4aa641cb26bd724"} Sep 29 12:44:51 crc kubenswrapper[4611]: I0929 12:44:51.882947 4611 generic.go:334] "Generic (PLEG): container finished" podID="3413e896-e275-4fab-81c4-c03efb31fcc6" containerID="eecef56ecc7281a2a2a7ee9371f5daa4870e1f03d8b959dd707516a3e9119d31" exitCode=0 Sep 29 12:44:51 crc kubenswrapper[4611]: I0929 12:44:51.883051 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6d4c" event={"ID":"3413e896-e275-4fab-81c4-c03efb31fcc6","Type":"ContainerDied","Data":"eecef56ecc7281a2a2a7ee9371f5daa4870e1f03d8b959dd707516a3e9119d31"} Sep 29 12:44:54 crc kubenswrapper[4611]: I0929 12:44:53.895878 4611 generic.go:334] "Generic (PLEG): container finished" podID="1dad5bc8-c0f2-437f-82f1-d516c6738eeb" containerID="621436309f3166faf9426fff5935f5df776b151913b145b835fb4915ff64fefe" exitCode=0 Sep 29 12:44:54 crc kubenswrapper[4611]: I0929 12:44:53.896029 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfmlj" event={"ID":"1dad5bc8-c0f2-437f-82f1-d516c6738eeb","Type":"ContainerDied","Data":"621436309f3166faf9426fff5935f5df776b151913b145b835fb4915ff64fefe"} Sep 29 12:44:54 crc kubenswrapper[4611]: I0929 12:44:53.902727 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6d4c" event={"ID":"3413e896-e275-4fab-81c4-c03efb31fcc6","Type":"ContainerStarted","Data":"3d8ffca78ebb060b1236a05aba270d1fc88146832b3e188f12852299f2c29f29"} Sep 29 12:44:54 crc kubenswrapper[4611]: I0929 12:44:53.933139 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c6d4c" podStartSLOduration=3.332155787 podStartE2EDuration="5.933118287s" podCreationTimestamp="2025-09-29 12:44:48 +0000 UTC" firstStartedPulling="2025-09-29 12:44:49.868646873 +0000 UTC m=+276.760166479" lastFinishedPulling="2025-09-29 12:44:52.469609373 +0000 UTC m=+279.361128979" observedRunningTime="2025-09-29 12:44:53.930270506 +0000 UTC m=+280.821790122" watchObservedRunningTime="2025-09-29 12:44:53.933118287 +0000 UTC m=+280.824637893" Sep 29 12:44:54 crc kubenswrapper[4611]: I0929 12:44:54.909597 4611 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/community-operators-kfmlj" event={"ID":"1dad5bc8-c0f2-437f-82f1-d516c6738eeb","Type":"ContainerStarted","Data":"86ff7f7c74e4720c5de5c0c3626bf9565626d0afeba9180b64e14563de0b6992"} Sep 29 12:44:56 crc kubenswrapper[4611]: I0929 12:44:56.447706 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:56 crc kubenswrapper[4611]: I0929 12:44:56.448096 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:56 crc kubenswrapper[4611]: I0929 12:44:56.488690 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:56 crc kubenswrapper[4611]: I0929 12:44:56.507491 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kfmlj" podStartSLOduration=3.98970859 podStartE2EDuration="7.507475321s" podCreationTimestamp="2025-09-29 12:44:49 +0000 UTC" firstStartedPulling="2025-09-29 12:44:50.87824699 +0000 UTC m=+277.769766596" lastFinishedPulling="2025-09-29 12:44:54.396013721 +0000 UTC m=+281.287533327" observedRunningTime="2025-09-29 12:44:54.928050787 +0000 UTC m=+281.819570393" watchObservedRunningTime="2025-09-29 12:44:56.507475321 +0000 UTC m=+283.398994927" Sep 29 12:44:56 crc kubenswrapper[4611]: I0929 12:44:56.956769 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 12:44:57 crc kubenswrapper[4611]: I0929 12:44:57.056985 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l98cw" Sep 29 12:44:57 crc kubenswrapper[4611]: I0929 12:44:57.059661 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l98cw" Sep 29 12:44:57 crc kubenswrapper[4611]: I0929 12:44:57.098816 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l98cw" Sep 29 12:44:57 crc kubenswrapper[4611]: I0929 12:44:57.963749 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l98cw" Sep 29 12:44:58 crc kubenswrapper[4611]: I0929 12:44:58.874229 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c6d4c" Sep 29 12:44:58 crc kubenswrapper[4611]: I0929 12:44:58.874573 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c6d4c" Sep 29 12:44:58 crc kubenswrapper[4611]: I0929 12:44:58.912098 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c6d4c" Sep 29 12:44:58 crc kubenswrapper[4611]: I0929 12:44:58.963379 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c6d4c" Sep 29 12:44:59 crc kubenswrapper[4611]: I0929 12:44:59.510264 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:59 crc kubenswrapper[4611]: I0929 12:44:59.510306 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:59 crc kubenswrapper[4611]: I0929 
12:44:59.549520 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:44:59 crc kubenswrapper[4611]: I0929 12:44:59.998364 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kfmlj" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.159349 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2"] Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.160044 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.162847 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.163100 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.178145 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7e409f2-7bd4-450d-8e97-c0020dc091c4-secret-volume\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.178209 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxmm7\" (UniqueName: \"kubernetes.io/projected/e7e409f2-7bd4-450d-8e97-c0020dc091c4-kube-api-access-lxmm7\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.178254 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7e409f2-7bd4-450d-8e97-c0020dc091c4-config-volume\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.181908 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2"] Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.279226 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7e409f2-7bd4-450d-8e97-c0020dc091c4-secret-volume\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.279290 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxmm7\" (UniqueName: \"kubernetes.io/projected/e7e409f2-7bd4-450d-8e97-c0020dc091c4-kube-api-access-lxmm7\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc 
kubenswrapper[4611]: I0929 12:45:00.279328 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7e409f2-7bd4-450d-8e97-c0020dc091c4-config-volume\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.280459 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7e409f2-7bd4-450d-8e97-c0020dc091c4-config-volume\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.284876 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7e409f2-7bd4-450d-8e97-c0020dc091c4-secret-volume\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.308365 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxmm7\" (UniqueName: \"kubernetes.io/projected/e7e409f2-7bd4-450d-8e97-c0020dc091c4-kube-api-access-lxmm7\") pod \"collect-profiles-29319165-9r7j2\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.474801 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:00 crc kubenswrapper[4611]: I0929 12:45:00.976048 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2"] Sep 29 12:45:00 crc kubenswrapper[4611]: W0929 12:45:00.985146 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7e409f2_7bd4_450d_8e97_c0020dc091c4.slice/crio-1bb44770b822c8121821baf679ceffe79a4e346d5e662e3f7ae8137b471e9d39 WatchSource:0}: Error finding container 1bb44770b822c8121821baf679ceffe79a4e346d5e662e3f7ae8137b471e9d39: Status 404 returned error can't find the container with id 1bb44770b822c8121821baf679ceffe79a4e346d5e662e3f7ae8137b471e9d39 Sep 29 12:45:01 crc kubenswrapper[4611]: I0929 12:45:01.945458 4611 generic.go:334] "Generic (PLEG): container finished" podID="e7e409f2-7bd4-450d-8e97-c0020dc091c4" containerID="0e0972f99474d164cd71916c89cf314d197b5221b6eb95e7f28211e396b85a50" exitCode=0 Sep 29 12:45:01 crc kubenswrapper[4611]: I0929 12:45:01.945671 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" event={"ID":"e7e409f2-7bd4-450d-8e97-c0020dc091c4","Type":"ContainerDied","Data":"0e0972f99474d164cd71916c89cf314d197b5221b6eb95e7f28211e396b85a50"} Sep 29 12:45:01 crc kubenswrapper[4611]: I0929 12:45:01.945851 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" event={"ID":"e7e409f2-7bd4-450d-8e97-c0020dc091c4","Type":"ContainerStarted","Data":"1bb44770b822c8121821baf679ceffe79a4e346d5e662e3f7ae8137b471e9d39"} Sep 29 12:45:03 
crc kubenswrapper[4611]: I0929 12:45:03.180267 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.235960 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7e409f2-7bd4-450d-8e97-c0020dc091c4-secret-volume\") pod \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.236034 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7e409f2-7bd4-450d-8e97-c0020dc091c4-config-volume\") pod \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.236076 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxmm7\" (UniqueName: \"kubernetes.io/projected/e7e409f2-7bd4-450d-8e97-c0020dc091c4-kube-api-access-lxmm7\") pod \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\" (UID: \"e7e409f2-7bd4-450d-8e97-c0020dc091c4\") " Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.237480 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e409f2-7bd4-450d-8e97-c0020dc091c4-config-volume" (OuterVolumeSpecName: "config-volume") pod "e7e409f2-7bd4-450d-8e97-c0020dc091c4" (UID: "e7e409f2-7bd4-450d-8e97-c0020dc091c4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.242578 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e409f2-7bd4-450d-8e97-c0020dc091c4-kube-api-access-lxmm7" (OuterVolumeSpecName: "kube-api-access-lxmm7") pod "e7e409f2-7bd4-450d-8e97-c0020dc091c4" (UID: "e7e409f2-7bd4-450d-8e97-c0020dc091c4"). InnerVolumeSpecName "kube-api-access-lxmm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.242694 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e409f2-7bd4-450d-8e97-c0020dc091c4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e7e409f2-7bd4-450d-8e97-c0020dc091c4" (UID: "e7e409f2-7bd4-450d-8e97-c0020dc091c4"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.337027 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7e409f2-7bd4-450d-8e97-c0020dc091c4-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.337064 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7e409f2-7bd4-450d-8e97-c0020dc091c4-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.337076 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxmm7\" (UniqueName: \"kubernetes.io/projected/e7e409f2-7bd4-450d-8e97-c0020dc091c4-kube-api-access-lxmm7\") on node \"crc\" DevicePath \"\"" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.955108 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" event={"ID":"e7e409f2-7bd4-450d-8e97-c0020dc091c4","Type":"ContainerDied","Data":"1bb44770b822c8121821baf679ceffe79a4e346d5e662e3f7ae8137b471e9d39"} Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.955151 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bb44770b822c8121821baf679ceffe79a4e346d5e662e3f7ae8137b471e9d39" Sep 29 12:45:03 crc kubenswrapper[4611]: I0929 12:45:03.955162 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2" Sep 29 12:46:34 crc kubenswrapper[4611]: I0929 12:46:34.628459 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:46:34 crc kubenswrapper[4611]: I0929 12:46:34.629050 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:47:04 crc kubenswrapper[4611]: I0929 12:47:04.628857 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:47:04 crc kubenswrapper[4611]: I0929 12:47:04.629406 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:47:34 crc kubenswrapper[4611]: I0929 12:47:34.629253 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:47:34 crc kubenswrapper[4611]: I0929 12:47:34.629875 
4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:47:34 crc kubenswrapper[4611]: I0929 12:47:34.629927 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:47:34 crc kubenswrapper[4611]: I0929 12:47:34.630922 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"02c1cf1e6d77a414178e7c5de630b700218c10ae6c54f2823aa24b1d4995bcf3"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 12:47:34 crc kubenswrapper[4611]: I0929 12:47:34.630986 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://02c1cf1e6d77a414178e7c5de630b700218c10ae6c54f2823aa24b1d4995bcf3" gracePeriod=600 Sep 29 12:47:35 crc kubenswrapper[4611]: I0929 12:47:35.695868 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="02c1cf1e6d77a414178e7c5de630b700218c10ae6c54f2823aa24b1d4995bcf3" exitCode=0 Sep 29 12:47:35 crc kubenswrapper[4611]: I0929 12:47:35.695941 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"02c1cf1e6d77a414178e7c5de630b700218c10ae6c54f2823aa24b1d4995bcf3"} Sep 29 12:47:35 crc kubenswrapper[4611]: I0929 12:47:35.696314 4611 scope.go:117] "RemoveContainer" containerID="5b0ff3346519adfdb0a00c063e2fd8b0001621bf63731b22500e17de12634f5d" Sep 29 12:47:35 crc kubenswrapper[4611]: I0929 12:47:35.696179 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"49931f0be5e603c2daaa8ecf2f4e39aef9cf15176e498454b5a286b9aedf81bb"} Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.179064 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-smmmk"] Sep 29 12:47:40 crc kubenswrapper[4611]: E0929 12:47:40.179818 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7e409f2-7bd4-450d-8e97-c0020dc091c4" containerName="collect-profiles" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.179831 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7e409f2-7bd4-450d-8e97-c0020dc091c4" containerName="collect-profiles" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.179931 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7e409f2-7bd4-450d-8e97-c0020dc091c4" containerName="collect-profiles" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.180353 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.205308 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-smmmk"] Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361106 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-registry-tls\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361337 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7ntg\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-kube-api-access-x7ntg\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361427 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/461675f1-cd28-4676-afa7-086f639b1208-trusted-ca\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361525 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/461675f1-cd28-4676-afa7-086f639b1208-installation-pull-secrets\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361667 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/461675f1-cd28-4676-afa7-086f639b1208-registry-certificates\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361783 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361860 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/461675f1-cd28-4676-afa7-086f639b1208-ca-trust-extracted\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.361936 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-bound-sa-token\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.384787 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463212 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/461675f1-cd28-4676-afa7-086f639b1208-installation-pull-secrets\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463274 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/461675f1-cd28-4676-afa7-086f639b1208-registry-certificates\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463326 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/461675f1-cd28-4676-afa7-086f639b1208-ca-trust-extracted\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463355 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-bound-sa-token\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463378 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-registry-tls\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463418 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7ntg\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-kube-api-access-x7ntg\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463447 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/461675f1-cd28-4676-afa7-086f639b1208-trusted-ca\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.463933 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/461675f1-cd28-4676-afa7-086f639b1208-ca-trust-extracted\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.464861 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/461675f1-cd28-4676-afa7-086f639b1208-trusted-ca\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.464954 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/461675f1-cd28-4676-afa7-086f639b1208-registry-certificates\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.469124 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/461675f1-cd28-4676-afa7-086f639b1208-installation-pull-secrets\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.469166 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-registry-tls\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.483371 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-bound-sa-token\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.487274 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7ntg\" (UniqueName: \"kubernetes.io/projected/461675f1-cd28-4676-afa7-086f639b1208-kube-api-access-x7ntg\") pod \"image-registry-66df7c8f76-smmmk\" (UID: \"461675f1-cd28-4676-afa7-086f639b1208\") " pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.498210 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:40 crc kubenswrapper[4611]: I0929 12:47:40.874610 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-smmmk"] Sep 29 12:47:41 crc kubenswrapper[4611]: I0929 12:47:41.729166 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" event={"ID":"461675f1-cd28-4676-afa7-086f639b1208","Type":"ContainerStarted","Data":"ad79ac305d857a1379577c271d9ff1f033ab1b20cd0164b0c54f3b173a1b935b"} Sep 29 12:47:41 crc kubenswrapper[4611]: I0929 12:47:41.729663 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:47:41 crc kubenswrapper[4611]: I0929 12:47:41.729724 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" event={"ID":"461675f1-cd28-4676-afa7-086f639b1208","Type":"ContainerStarted","Data":"baaa8473bde469a447e9d82f97cbbb57be123a0fc808982505e5222dfcdb3298"} Sep 29 12:47:41 crc kubenswrapper[4611]: I0929 12:47:41.749577 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" podStartSLOduration=1.749560102 podStartE2EDuration="1.749560102s" podCreationTimestamp="2025-09-29 12:47:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:47:41.74811912 +0000 UTC m=+448.639638726" watchObservedRunningTime="2025-09-29 12:47:41.749560102 +0000 UTC m=+448.641079708" Sep 29 12:48:00 crc kubenswrapper[4611]: I0929 12:48:00.503095 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-smmmk" Sep 29 12:48:00 crc kubenswrapper[4611]: I0929 12:48:00.561370 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nt8jb"] Sep 29 12:48:25 crc kubenswrapper[4611]: I0929 12:48:25.598795 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" podUID="21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" containerName="registry" containerID="cri-o://21198cc8a170ee3b97a66ddef1dbb989a6b3ef1fba27b25183b2e1234c4e72ef" gracePeriod=30 Sep 29 12:48:25 crc kubenswrapper[4611]: I0929 12:48:25.957106 4611 generic.go:334] "Generic (PLEG): container finished" podID="21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" containerID="21198cc8a170ee3b97a66ddef1dbb989a6b3ef1fba27b25183b2e1234c4e72ef" exitCode=0 Sep 29 12:48:25 crc kubenswrapper[4611]: I0929 12:48:25.957207 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" event={"ID":"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23","Type":"ContainerDied","Data":"21198cc8a170ee3b97a66ddef1dbb989a6b3ef1fba27b25183b2e1234c4e72ef"} Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.425460 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573214 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573314 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-installation-pull-secrets\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573342 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmp6s\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-kube-api-access-cmp6s\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573379 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-bound-sa-token\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573424 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-certificates\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573450 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-tls\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573479 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-trusted-ca\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.573515 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-ca-trust-extracted\") pod \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\" (UID: \"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23\") " Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.574657 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.575773 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.579533 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-kube-api-access-cmp6s" (OuterVolumeSpecName: "kube-api-access-cmp6s") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "kube-api-access-cmp6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.579571 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.581440 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.583181 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.583516 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.592016 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" (UID: "21d82b2d-3a11-4e88-9b00-cb9a9a62fa23"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675226 4611 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675268 4611 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675280 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmp6s\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-kube-api-access-cmp6s\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675290 4611 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675298 4611 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675307 4611 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.675314 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.963614 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" event={"ID":"21d82b2d-3a11-4e88-9b00-cb9a9a62fa23","Type":"ContainerDied","Data":"059bccec51b0521c9cc6d67a4f4fde8ba8d93e468cebddb5074e60138330facd"} Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.963698 4611 scope.go:117] "RemoveContainer" containerID="21198cc8a170ee3b97a66ddef1dbb989a6b3ef1fba27b25183b2e1234c4e72ef" Sep 29 12:48:26 crc kubenswrapper[4611]: I0929 12:48:26.963661 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nt8jb" Sep 29 12:48:27 crc kubenswrapper[4611]: I0929 12:48:27.005322 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nt8jb"] Sep 29 12:48:27 crc kubenswrapper[4611]: I0929 12:48:27.013677 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nt8jb"] Sep 29 12:48:27 crc kubenswrapper[4611]: I0929 12:48:27.742374 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" path="/var/lib/kubelet/pods/21d82b2d-3a11-4e88-9b00-cb9a9a62fa23/volumes" Sep 29 12:49:34 crc kubenswrapper[4611]: I0929 12:49:34.628907 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:49:34 crc kubenswrapper[4611]: I0929 12:49:34.629376 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:50:04 crc kubenswrapper[4611]: I0929 12:50:04.628704 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:50:04 crc kubenswrapper[4611]: I0929 12:50:04.629331 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.219094 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-2flkk"] Sep 29 12:50:17 crc kubenswrapper[4611]: E0929 12:50:17.219931 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" containerName="registry" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.219944 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" containerName="registry" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.220048 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="21d82b2d-3a11-4e88-9b00-cb9a9a62fa23" containerName="registry" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.220505 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.223983 4611 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-7b8lz" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.229998 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.238858 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.243442 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-2flkk"] Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.254645 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-9pzh9"] Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.255752 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-9pzh9" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.257721 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-krmvg"] Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.258339 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:17 crc kubenswrapper[4611]: W0929 12:50:17.261037 4611 reflector.go:561] object-"cert-manager"/"cert-manager-webhook-dockercfg-d82lx": failed to list *v1.Secret: secrets "cert-manager-webhook-dockercfg-d82lx" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "cert-manager": no relationship found between node 'crc' and this object Sep 29 12:50:17 crc kubenswrapper[4611]: E0929 12:50:17.261087 4611 reflector.go:158] "Unhandled Error" err="object-\"cert-manager\"/\"cert-manager-webhook-dockercfg-d82lx\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-manager-webhook-dockercfg-d82lx\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"cert-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:50:17 crc kubenswrapper[4611]: W0929 12:50:17.261130 4611 reflector.go:561] object-"cert-manager"/"cert-manager-dockercfg-476gv": failed to list *v1.Secret: secrets "cert-manager-dockercfg-476gv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "cert-manager": no relationship found between node 'crc' and this object Sep 29 12:50:17 crc kubenswrapper[4611]: E0929 12:50:17.261144 4611 reflector.go:158] "Unhandled Error" err="object-\"cert-manager\"/\"cert-manager-dockercfg-476gv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-manager-dockercfg-476gv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"cert-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.277924 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-9pzh9"] Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.285803 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["cert-manager/cert-manager-webhook-5655c58dd6-krmvg"] Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.387035 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl58g\" (UniqueName: \"kubernetes.io/projected/9c6eb535-520e-4d74-b699-f2aa3b5b5d8c-kube-api-access-fl58g\") pod \"cert-manager-5b446d88c5-9pzh9\" (UID: \"9c6eb535-520e-4d74-b699-f2aa3b5b5d8c\") " pod="cert-manager/cert-manager-5b446d88c5-9pzh9" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.387085 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmrks\" (UniqueName: \"kubernetes.io/projected/dbaafa13-738a-4c17-a4d0-b52614e7ae87-kube-api-access-mmrks\") pod \"cert-manager-webhook-5655c58dd6-krmvg\" (UID: \"dbaafa13-738a-4c17-a4d0-b52614e7ae87\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.387149 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vcs4\" (UniqueName: \"kubernetes.io/projected/1c273b1f-5284-46b7-8167-05e3bcc66102-kube-api-access-6vcs4\") pod \"cert-manager-cainjector-7f985d654d-2flkk\" (UID: \"1c273b1f-5284-46b7-8167-05e3bcc66102\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.488872 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl58g\" (UniqueName: \"kubernetes.io/projected/9c6eb535-520e-4d74-b699-f2aa3b5b5d8c-kube-api-access-fl58g\") pod \"cert-manager-5b446d88c5-9pzh9\" (UID: \"9c6eb535-520e-4d74-b699-f2aa3b5b5d8c\") " pod="cert-manager/cert-manager-5b446d88c5-9pzh9" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.489256 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmrks\" (UniqueName: \"kubernetes.io/projected/dbaafa13-738a-4c17-a4d0-b52614e7ae87-kube-api-access-mmrks\") pod \"cert-manager-webhook-5655c58dd6-krmvg\" (UID: \"dbaafa13-738a-4c17-a4d0-b52614e7ae87\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.489420 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vcs4\" (UniqueName: \"kubernetes.io/projected/1c273b1f-5284-46b7-8167-05e3bcc66102-kube-api-access-6vcs4\") pod \"cert-manager-cainjector-7f985d654d-2flkk\" (UID: \"1c273b1f-5284-46b7-8167-05e3bcc66102\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.509817 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vcs4\" (UniqueName: \"kubernetes.io/projected/1c273b1f-5284-46b7-8167-05e3bcc66102-kube-api-access-6vcs4\") pod \"cert-manager-cainjector-7f985d654d-2flkk\" (UID: \"1c273b1f-5284-46b7-8167-05e3bcc66102\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.510604 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmrks\" (UniqueName: \"kubernetes.io/projected/dbaafa13-738a-4c17-a4d0-b52614e7ae87-kube-api-access-mmrks\") pod \"cert-manager-webhook-5655c58dd6-krmvg\" (UID: \"dbaafa13-738a-4c17-a4d0-b52614e7ae87\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.515560 4611 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl58g\" (UniqueName: \"kubernetes.io/projected/9c6eb535-520e-4d74-b699-f2aa3b5b5d8c-kube-api-access-fl58g\") pod \"cert-manager-5b446d88c5-9pzh9\" (UID: \"9c6eb535-520e-4d74-b699-f2aa3b5b5d8c\") " pod="cert-manager/cert-manager-5b446d88c5-9pzh9" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.539374 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.760193 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-2flkk"] Sep 29 12:50:17 crc kubenswrapper[4611]: I0929 12:50:17.766370 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.546226 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" event={"ID":"1c273b1f-5284-46b7-8167-05e3bcc66102","Type":"ContainerStarted","Data":"ef47ca2fc0bc859322c43b9f6f856cf94a31f907d8426dc0afacc96c65b88740"} Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.573080 4611 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="cert-manager/cert-manager-5b446d88c5-9pzh9" secret="" err="failed to sync secret cache: timed out waiting for the condition" Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.573481 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-9pzh9" Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.578018 4611 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-476gv" Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.583452 4611 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" secret="" err="failed to sync secret cache: timed out waiting for the condition" Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.583534 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.784761 4611 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-d82lx" Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.913380 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-9pzh9"] Sep 29 12:50:18 crc kubenswrapper[4611]: I0929 12:50:18.971940 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-krmvg"] Sep 29 12:50:18 crc kubenswrapper[4611]: W0929 12:50:18.981732 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbaafa13_738a_4c17_a4d0_b52614e7ae87.slice/crio-b0de6fff0cd65d24f68d124336838f1b63adf10d2ac479c45ebbe21ecde6f805 WatchSource:0}: Error finding container b0de6fff0cd65d24f68d124336838f1b63adf10d2ac479c45ebbe21ecde6f805: Status 404 returned error can't find the container with id b0de6fff0cd65d24f68d124336838f1b63adf10d2ac479c45ebbe21ecde6f805 Sep 29 12:50:18 crc kubenswrapper[4611]: W0929 12:50:18.983687 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c6eb535_520e_4d74_b699_f2aa3b5b5d8c.slice/crio-035ff9de6a7950dd9b921dd6e17ef28062ed6f62bdccaca0887c3ee078464dea WatchSource:0}: Error finding container 035ff9de6a7950dd9b921dd6e17ef28062ed6f62bdccaca0887c3ee078464dea: Status 404 returned error can't find the container with id 035ff9de6a7950dd9b921dd6e17ef28062ed6f62bdccaca0887c3ee078464dea Sep 29 12:50:19 crc kubenswrapper[4611]: I0929 12:50:19.552806 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-9pzh9" event={"ID":"9c6eb535-520e-4d74-b699-f2aa3b5b5d8c","Type":"ContainerStarted","Data":"035ff9de6a7950dd9b921dd6e17ef28062ed6f62bdccaca0887c3ee078464dea"} Sep 29 12:50:19 crc kubenswrapper[4611]: I0929 12:50:19.555181 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" event={"ID":"dbaafa13-738a-4c17-a4d0-b52614e7ae87","Type":"ContainerStarted","Data":"b0de6fff0cd65d24f68d124336838f1b63adf10d2ac479c45ebbe21ecde6f805"} Sep 29 12:50:20 crc kubenswrapper[4611]: I0929 12:50:20.563330 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" event={"ID":"1c273b1f-5284-46b7-8167-05e3bcc66102","Type":"ContainerStarted","Data":"949c289bd6cd50a1d033c42dd3fbd000529593ebf5778d0cfdfe7a61ba6910c6"} Sep 29 12:50:22 crc kubenswrapper[4611]: I0929 12:50:22.575537 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-9pzh9" event={"ID":"9c6eb535-520e-4d74-b699-f2aa3b5b5d8c","Type":"ContainerStarted","Data":"f6cfead321f61122309d69c408e366912501d79d81b85e5f44c63a9916afed38"} Sep 29 12:50:22 crc kubenswrapper[4611]: I0929 12:50:22.577234 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" event={"ID":"dbaafa13-738a-4c17-a4d0-b52614e7ae87","Type":"ContainerStarted","Data":"916c433f55d36f01ef6e3960d54cf30c97ae4ab5ec81ae1d608de98f1623ee9b"} Sep 29 12:50:22 crc kubenswrapper[4611]: I0929 12:50:22.577703 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:22 crc kubenswrapper[4611]: I0929 
12:50:22.591108 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-2flkk" podStartSLOduration=3.288897096 podStartE2EDuration="5.591093636s" podCreationTimestamp="2025-09-29 12:50:17 +0000 UTC" firstStartedPulling="2025-09-29 12:50:17.765934549 +0000 UTC m=+604.657454155" lastFinishedPulling="2025-09-29 12:50:20.068131089 +0000 UTC m=+606.959650695" observedRunningTime="2025-09-29 12:50:20.586850205 +0000 UTC m=+607.478369811" watchObservedRunningTime="2025-09-29 12:50:22.591093636 +0000 UTC m=+609.482613242" Sep 29 12:50:22 crc kubenswrapper[4611]: I0929 12:50:22.591959 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-9pzh9" podStartSLOduration=2.5515970660000002 podStartE2EDuration="5.591953311s" podCreationTimestamp="2025-09-29 12:50:17 +0000 UTC" firstStartedPulling="2025-09-29 12:50:18.986471137 +0000 UTC m=+605.877990743" lastFinishedPulling="2025-09-29 12:50:22.026827382 +0000 UTC m=+608.918346988" observedRunningTime="2025-09-29 12:50:22.590364335 +0000 UTC m=+609.481883941" watchObservedRunningTime="2025-09-29 12:50:22.591953311 +0000 UTC m=+609.483472917" Sep 29 12:50:22 crc kubenswrapper[4611]: I0929 12:50:22.617229 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" podStartSLOduration=2.572309825 podStartE2EDuration="5.617213881s" podCreationTimestamp="2025-09-29 12:50:17 +0000 UTC" firstStartedPulling="2025-09-29 12:50:18.983358737 +0000 UTC m=+605.874878343" lastFinishedPulling="2025-09-29 12:50:22.028262793 +0000 UTC m=+608.919782399" observedRunningTime="2025-09-29 12:50:22.615402369 +0000 UTC m=+609.506921985" watchObservedRunningTime="2025-09-29 12:50:22.617213881 +0000 UTC m=+609.508733487" Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.574313 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p95nv"] Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.574986 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-controller" containerID="cri-o://d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.575112 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.575158 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-node" containerID="cri-o://6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.575190 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-acl-logging" containerID="cri-o://48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: 
I0929 12:50:27.575168 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="northd" containerID="cri-o://96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.575427 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="sbdb" containerID="cri-o://43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.575516 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="nbdb" containerID="cri-o://54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" gracePeriod=30 Sep 29 12:50:27 crc kubenswrapper[4611]: I0929 12:50:27.605229 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" containerID="cri-o://ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" gracePeriod=30 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.360183 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/3.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.362350 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovn-acl-logging/0.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.362755 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovn-controller/0.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.363099 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417358 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6s7qr"] Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417613 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="northd" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417649 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="northd" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417681 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-node" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417689 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-node" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417702 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417712 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417724 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-acl-logging" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417731 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-acl-logging" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417738 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="sbdb" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417745 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="sbdb" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417759 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417766 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417775 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="nbdb" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417782 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="nbdb" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417791 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417799 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417809 4611 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417816 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417824 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417831 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417844 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kubecfg-setup" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417852 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kubecfg-setup" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.417863 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417873 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417980 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-acl-logging" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417988 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="sbdb" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.417997 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418004 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="northd" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418014 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-node" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418023 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418031 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovn-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418043 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="nbdb" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418052 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418061 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418070 
4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418078 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.418210 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.418220 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerName="ovnkube-controller" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.419846 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534141 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-etc-openvswitch\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534189 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-var-lib-cni-networks-ovn-kubernetes\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534218 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-log-socket\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534247 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-kubelet\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534263 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-netns\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534255 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534292 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-config\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534314 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-ovn\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534329 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-env-overrides\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534336 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534344 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-bin\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534369 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534392 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-var-lib-openvswitch\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534397 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534410 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534420 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-node-log\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534437 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534462 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534468 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-script-lib\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534504 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-ovn-kubernetes\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534540 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-systemd-units\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534566 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovn-node-metrics-cert\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534600 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2pdp\" (UniqueName: \"kubernetes.io/projected/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-kube-api-access-j2pdp\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc 
kubenswrapper[4611]: I0929 12:50:28.534635 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-netd\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534655 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-slash\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534673 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-openvswitch\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534694 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-systemd\") pod \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\" (UID: \"bfec2820-7242-4dd6-9fa5-4ebe161f99ba\") " Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534881 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-run-ovn-kubernetes\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534918 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovn-node-metrics-cert\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534936 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534942 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-systemd-units\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534976 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534999 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-slash" (OuterVolumeSpecName: "host-slash") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535029 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535204 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535233 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535252 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535272 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-node-log" (OuterVolumeSpecName: "node-log") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.534999 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535337 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovnkube-config\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535368 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-slash\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535387 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-node-log\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535410 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-log-socket\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535426 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kczxd\" (UniqueName: \"kubernetes.io/projected/9a5a732b-5b93-4b76-8131-25b1a39b0963-kube-api-access-kczxd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535451 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-ovn\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535481 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-etc-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535501 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-run-netns\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535518 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-cni-netd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535534 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovnkube-script-lib\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535559 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535576 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-env-overrides\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535595 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-kubelet\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535593 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-log-socket" (OuterVolumeSpecName: "log-socket") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535612 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-var-lib-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535736 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-systemd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535759 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535788 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-cni-bin\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535883 4611 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535898 4611 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535928 4611 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535946 4611 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535959 4611 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535972 4611 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535983 4611 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.535994 4611 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536003 4611 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536013 4611 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536022 4611 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536031 4611 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536041 4611 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536050 4611 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536060 4611 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536070 4611 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.536081 4611 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.540026 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-kube-api-access-j2pdp" (OuterVolumeSpecName: "kube-api-access-j2pdp") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "kube-api-access-j2pdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.540058 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.549260 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "bfec2820-7242-4dd6-9fa5-4ebe161f99ba" (UID: "bfec2820-7242-4dd6-9fa5-4ebe161f99ba"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.590763 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-krmvg" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.608359 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/2.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.608813 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/1.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.608856 4611 generic.go:334] "Generic (PLEG): container finished" podID="18731b4e-6360-4d87-b586-0a9dc6b5af1e" containerID="3019c59039b6ddebfa387398ba37323b792ff4c1ac4de148cceb69288d0121fe" exitCode=2 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.608915 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerDied","Data":"3019c59039b6ddebfa387398ba37323b792ff4c1ac4de148cceb69288d0121fe"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.608954 4611 scope.go:117] "RemoveContainer" containerID="8f6c17f38122e6f09130791f393def156f1820795ee9d39916e7f26517253389" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.609513 4611 scope.go:117] "RemoveContainer" containerID="3019c59039b6ddebfa387398ba37323b792ff4c1ac4de148cceb69288d0121fe" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.609794 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-csch6_openshift-multus(18731b4e-6360-4d87-b586-0a9dc6b5af1e)\"" pod="openshift-multus/multus-csch6" podUID="18731b4e-6360-4d87-b586-0a9dc6b5af1e" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.613918 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovnkube-controller/3.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.617348 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovn-acl-logging/0.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.617943 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p95nv_bfec2820-7242-4dd6-9fa5-4ebe161f99ba/ovn-controller/0.log" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618338 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" exitCode=0 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618364 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" exitCode=0 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618375 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" exitCode=0 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618384 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" exitCode=0 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618392 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" exitCode=0 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618402 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" exitCode=0 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618411 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" exitCode=143 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618421 4611 generic.go:334] "Generic (PLEG): container finished" podID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" exitCode=143 Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618443 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618471 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618487 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618499 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618513 4611 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618525 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618538 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618551 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618558 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618566 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618573 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618579 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618586 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618593 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618600 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618606 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618615 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618704 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618713 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618721 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618729 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618736 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618742 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618749 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618757 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618764 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618771 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618780 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618791 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618799 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618806 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618814 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618821 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618829 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618835 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618843 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618849 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618856 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618866 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" event={"ID":"bfec2820-7242-4dd6-9fa5-4ebe161f99ba","Type":"ContainerDied","Data":"da67c5f3e62b4626745bbc5bc9f59d7e136368590f0ecd66fa748d21779fb1a0"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618877 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618885 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618892 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618897 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618903 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618909 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618916 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618922 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618929 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.618936 4611 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.619026 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p95nv" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.636738 4611 scope.go:117] "RemoveContainer" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637807 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-run-ovn-kubernetes\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637842 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovn-node-metrics-cert\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637870 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-systemd-units\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637900 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637912 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-run-ovn-kubernetes\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637930 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovnkube-config\") pod \"ovnkube-node-6s7qr\" (UID: 
\"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637954 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-node-log\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637977 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-slash\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.637975 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-systemd-units\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638004 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-log-socket\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638019 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kczxd\" (UniqueName: \"kubernetes.io/projected/9a5a732b-5b93-4b76-8131-25b1a39b0963-kube-api-access-kczxd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638039 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-ovn\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638059 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-etc-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638075 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-run-netns\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638091 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-cni-netd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: 
I0929 12:50:28.638104 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovnkube-script-lib\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638147 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638170 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-env-overrides\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638191 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-kubelet\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638208 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-var-lib-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638230 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-systemd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638252 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-cni-bin\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638306 4611 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638333 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2pdp\" (UniqueName: \"kubernetes.io/projected/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-kube-api-access-j2pdp\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638345 4611 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfec2820-7242-4dd6-9fa5-4ebe161f99ba-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638393 4611 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-cni-bin\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638435 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638473 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-cni-netd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638481 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-run-netns\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638535 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-kubelet\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638567 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-var-lib-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638594 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-systemd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638753 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-etc-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.638829 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-ovn\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.639025 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-env-overrides\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.639460 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovnkube-config\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.639513 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-host-slash\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.639536 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-node-log\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.639655 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-run-openvswitch\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.639691 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9a5a732b-5b93-4b76-8131-25b1a39b0963-log-socket\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.640595 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovnkube-script-lib\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.643101 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9a5a732b-5b93-4b76-8131-25b1a39b0963-ovn-node-metrics-cert\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.665665 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p95nv"] Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.667747 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kczxd\" (UniqueName: \"kubernetes.io/projected/9a5a732b-5b93-4b76-8131-25b1a39b0963-kube-api-access-kczxd\") pod \"ovnkube-node-6s7qr\" (UID: \"9a5a732b-5b93-4b76-8131-25b1a39b0963\") " pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.667887 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-ovn-kubernetes/ovnkube-node-p95nv"] Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.669085 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.683410 4611 scope.go:117] "RemoveContainer" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.720993 4611 scope.go:117] "RemoveContainer" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.732925 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.737106 4611 scope.go:117] "RemoveContainer" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.753373 4611 scope.go:117] "RemoveContainer" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.799853 4611 scope.go:117] "RemoveContainer" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.817511 4611 scope.go:117] "RemoveContainer" containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.832009 4611 scope.go:117] "RemoveContainer" containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.854197 4611 scope.go:117] "RemoveContainer" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.868481 4611 scope.go:117] "RemoveContainer" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.869673 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": container with ID starting with ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8 not found: ID does not exist" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.869764 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} err="failed to get container status \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": rpc error: code = NotFound desc = could not find container \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": container with ID starting with ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.869863 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.870185 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": container with ID starting 
with f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede not found: ID does not exist" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.870208 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} err="failed to get container status \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": rpc error: code = NotFound desc = could not find container \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": container with ID starting with f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.870222 4611 scope.go:117] "RemoveContainer" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.870451 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": container with ID starting with 43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8 not found: ID does not exist" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.870495 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} err="failed to get container status \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": rpc error: code = NotFound desc = could not find container \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": container with ID starting with 43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.870508 4611 scope.go:117] "RemoveContainer" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.870707 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": container with ID starting with 54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f not found: ID does not exist" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.870740 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} err="failed to get container status \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": rpc error: code = NotFound desc = could not find container \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": container with ID starting with 54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.870759 4611 scope.go:117] "RemoveContainer" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.871030 4611 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": container with ID starting with 96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193 not found: ID does not exist" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.871059 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} err="failed to get container status \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": rpc error: code = NotFound desc = could not find container \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": container with ID starting with 96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.871074 4611 scope.go:117] "RemoveContainer" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.871697 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": container with ID starting with d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37 not found: ID does not exist" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.871721 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} err="failed to get container status \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": rpc error: code = NotFound desc = could not find container \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": container with ID starting with d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.871734 4611 scope.go:117] "RemoveContainer" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.873187 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": container with ID starting with 6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35 not found: ID does not exist" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.873279 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} err="failed to get container status \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": rpc error: code = NotFound desc = could not find container \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": container with ID starting with 6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.873357 4611 scope.go:117] "RemoveContainer" 
containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.873917 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": container with ID starting with 48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302 not found: ID does not exist" containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.873939 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} err="failed to get container status \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": rpc error: code = NotFound desc = could not find container \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": container with ID starting with 48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.873954 4611 scope.go:117] "RemoveContainer" containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.874721 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": container with ID starting with d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec not found: ID does not exist" containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.874924 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} err="failed to get container status \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": rpc error: code = NotFound desc = could not find container \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": container with ID starting with d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.874939 4611 scope.go:117] "RemoveContainer" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" Sep 29 12:50:28 crc kubenswrapper[4611]: E0929 12:50:28.875183 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": container with ID starting with 6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98 not found: ID does not exist" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.875210 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} err="failed to get container status \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": rpc error: code = NotFound desc = could not find container \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": container with ID starting with 
6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.875228 4611 scope.go:117] "RemoveContainer" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.875650 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} err="failed to get container status \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": rpc error: code = NotFound desc = could not find container \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": container with ID starting with ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.875931 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.877329 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} err="failed to get container status \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": rpc error: code = NotFound desc = could not find container \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": container with ID starting with f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.877356 4611 scope.go:117] "RemoveContainer" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.877745 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} err="failed to get container status \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": rpc error: code = NotFound desc = could not find container \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": container with ID starting with 43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.877763 4611 scope.go:117] "RemoveContainer" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.878049 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} err="failed to get container status \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": rpc error: code = NotFound desc = could not find container \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": container with ID starting with 54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.878100 4611 scope.go:117] "RemoveContainer" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.878433 4611 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} err="failed to get container status \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": rpc error: code = NotFound desc = could not find container \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": container with ID starting with 96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.878528 4611 scope.go:117] "RemoveContainer" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.879340 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} err="failed to get container status \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": rpc error: code = NotFound desc = could not find container \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": container with ID starting with d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.879376 4611 scope.go:117] "RemoveContainer" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.879833 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} err="failed to get container status \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": rpc error: code = NotFound desc = could not find container \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": container with ID starting with 6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.879861 4611 scope.go:117] "RemoveContainer" containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.880394 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} err="failed to get container status \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": rpc error: code = NotFound desc = could not find container \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": container with ID starting with 48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.880437 4611 scope.go:117] "RemoveContainer" containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.880753 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} err="failed to get container status \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": rpc error: code = NotFound desc = could not find container \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": container with ID starting with d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec not found: ID does not exist" Sep 
29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.880780 4611 scope.go:117] "RemoveContainer" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.881150 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} err="failed to get container status \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": rpc error: code = NotFound desc = could not find container \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": container with ID starting with 6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.881178 4611 scope.go:117] "RemoveContainer" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.881485 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} err="failed to get container status \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": rpc error: code = NotFound desc = could not find container \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": container with ID starting with ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.881507 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.881815 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} err="failed to get container status \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": rpc error: code = NotFound desc = could not find container \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": container with ID starting with f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.881840 4611 scope.go:117] "RemoveContainer" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.882217 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} err="failed to get container status \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": rpc error: code = NotFound desc = could not find container \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": container with ID starting with 43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.882237 4611 scope.go:117] "RemoveContainer" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.883345 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} err="failed to get container status 
\"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": rpc error: code = NotFound desc = could not find container \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": container with ID starting with 54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.883452 4611 scope.go:117] "RemoveContainer" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.884192 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} err="failed to get container status \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": rpc error: code = NotFound desc = could not find container \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": container with ID starting with 96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.884212 4611 scope.go:117] "RemoveContainer" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.884677 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} err="failed to get container status \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": rpc error: code = NotFound desc = could not find container \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": container with ID starting with d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.884702 4611 scope.go:117] "RemoveContainer" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.884934 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} err="failed to get container status \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": rpc error: code = NotFound desc = could not find container \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": container with ID starting with 6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.884954 4611 scope.go:117] "RemoveContainer" containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.885344 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} err="failed to get container status \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": rpc error: code = NotFound desc = could not find container \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": container with ID starting with 48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.885369 4611 scope.go:117] "RemoveContainer" 
containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.885697 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} err="failed to get container status \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": rpc error: code = NotFound desc = could not find container \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": container with ID starting with d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.885722 4611 scope.go:117] "RemoveContainer" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.886232 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} err="failed to get container status \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": rpc error: code = NotFound desc = could not find container \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": container with ID starting with 6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.886255 4611 scope.go:117] "RemoveContainer" containerID="ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.889948 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8"} err="failed to get container status \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": rpc error: code = NotFound desc = could not find container \"ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8\": container with ID starting with ffa860ea68719039f407902d753df3b0a6168f3aa67d47199d30833ed562e8e8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.890018 4611 scope.go:117] "RemoveContainer" containerID="f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.890690 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede"} err="failed to get container status \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": rpc error: code = NotFound desc = could not find container \"f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede\": container with ID starting with f960ee6c3e37d3f9e3e324c817f53cd8741d1b88079e279228ea9cc8df7caede not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.890717 4611 scope.go:117] "RemoveContainer" containerID="43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.891014 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8"} err="failed to get container status \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": rpc error: code = NotFound desc = could not find 
container \"43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8\": container with ID starting with 43b4221f4d115fb6b65bd1acd14f6c0a9f6c56db74b5a489308435bfe8ccdfb8 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.891037 4611 scope.go:117] "RemoveContainer" containerID="54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.891394 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f"} err="failed to get container status \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": rpc error: code = NotFound desc = could not find container \"54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f\": container with ID starting with 54e13d6fbe0377fab87d2d3f93a87f223c5830934a20858925c383f128f53f7f not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.891419 4611 scope.go:117] "RemoveContainer" containerID="96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.891759 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193"} err="failed to get container status \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": rpc error: code = NotFound desc = could not find container \"96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193\": container with ID starting with 96e78b78f3dba41dbf398381bc16ca069ff05f315a997a08d2753a1327817193 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.891849 4611 scope.go:117] "RemoveContainer" containerID="d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.892215 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37"} err="failed to get container status \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": rpc error: code = NotFound desc = could not find container \"d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37\": container with ID starting with d031ef0945edbf715bc6f35307e5cb33e6afbe66a7d7069da209b79ad16eec37 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.892262 4611 scope.go:117] "RemoveContainer" containerID="6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.892496 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35"} err="failed to get container status \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": rpc error: code = NotFound desc = could not find container \"6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35\": container with ID starting with 6e88897e08871bc98fab4e30afae8a20d4c1d3a11ee2be12389f662f90cbfa35 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.892564 4611 scope.go:117] "RemoveContainer" containerID="48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.893065 4611 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302"} err="failed to get container status \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": rpc error: code = NotFound desc = could not find container \"48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302\": container with ID starting with 48b234cb2d9411cba76e0ce1162747ce8017340db3cd3866dc59253520761302 not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.893117 4611 scope.go:117] "RemoveContainer" containerID="d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.893532 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec"} err="failed to get container status \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": rpc error: code = NotFound desc = could not find container \"d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec\": container with ID starting with d3428ed78e5384f17c94b3df34a5ba9e4ac4995900beb1a450d96ba9947402ec not found: ID does not exist" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.893559 4611 scope.go:117] "RemoveContainer" containerID="6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98" Sep 29 12:50:28 crc kubenswrapper[4611]: I0929 12:50:28.897047 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98"} err="failed to get container status \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": rpc error: code = NotFound desc = could not find container \"6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98\": container with ID starting with 6f2fb029a90e3df19599a6b248bf6512ee83ee6aa99e0b9afa421170dbe89f98 not found: ID does not exist" Sep 29 12:50:29 crc kubenswrapper[4611]: I0929 12:50:29.625419 4611 generic.go:334] "Generic (PLEG): container finished" podID="9a5a732b-5b93-4b76-8131-25b1a39b0963" containerID="840a17b821683816dc4d9a32762ff6577e7d5dc3fdc5b70f1dc0760424ce7d99" exitCode=0 Sep 29 12:50:29 crc kubenswrapper[4611]: I0929 12:50:29.625471 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerDied","Data":"840a17b821683816dc4d9a32762ff6577e7d5dc3fdc5b70f1dc0760424ce7d99"} Sep 29 12:50:29 crc kubenswrapper[4611]: I0929 12:50:29.625494 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"64a5b4567156eec36c327dd9e0446550cda87912afde5fc949dff3721ebe7e2f"} Sep 29 12:50:29 crc kubenswrapper[4611]: I0929 12:50:29.628015 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/2.log" Sep 29 12:50:29 crc kubenswrapper[4611]: I0929 12:50:29.749323 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfec2820-7242-4dd6-9fa5-4ebe161f99ba" path="/var/lib/kubelet/pods/bfec2820-7242-4dd6-9fa5-4ebe161f99ba/volumes" Sep 29 12:50:30 crc kubenswrapper[4611]: I0929 12:50:30.636164 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"29a5a00a8ed8647a2d0cdf5a5257f6ad990ae026963caa35d1e58cc9a00d797d"} Sep 29 12:50:30 crc kubenswrapper[4611]: I0929 12:50:30.636505 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"360f3857e4da8269d934a10d90bd1a36f684e9f96de887f00e009997a0b8f92e"} Sep 29 12:50:30 crc kubenswrapper[4611]: I0929 12:50:30.636520 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"a39c1f6960b33fbba1baf94166f808c34d681dae80ac177570f9b08a5a94a2f1"} Sep 29 12:50:30 crc kubenswrapper[4611]: I0929 12:50:30.636530 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"8eff5faac7052fc7043ab93e33318a8a006097e2785d17920fd3d96d2efdaebd"} Sep 29 12:50:30 crc kubenswrapper[4611]: I0929 12:50:30.636540 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"89cf3ee412ebb4098ef89781a0c341f3281de68a411a0c41d668cbccfdedc840"} Sep 29 12:50:30 crc kubenswrapper[4611]: I0929 12:50:30.636551 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"bc31008653c6b60cf90c2633fc554a82fbc34fa16169bbb33486d64ed2e2cbb9"} Sep 29 12:50:32 crc kubenswrapper[4611]: I0929 12:50:32.650775 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"1d140d2cad62aebe44ea78bba467c89bbcb1367d0d95cc91024787a7c1460a71"} Sep 29 12:50:34 crc kubenswrapper[4611]: I0929 12:50:34.629488 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:50:34 crc kubenswrapper[4611]: I0929 12:50:34.629547 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:50:34 crc kubenswrapper[4611]: I0929 12:50:34.629589 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:50:34 crc kubenswrapper[4611]: I0929 12:50:34.630197 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"49931f0be5e603c2daaa8ecf2f4e39aef9cf15176e498454b5a286b9aedf81bb"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 12:50:34 crc kubenswrapper[4611]: 
I0929 12:50:34.630434 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://49931f0be5e603c2daaa8ecf2f4e39aef9cf15176e498454b5a286b9aedf81bb" gracePeriod=600 Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.673871 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" event={"ID":"9a5a732b-5b93-4b76-8131-25b1a39b0963","Type":"ContainerStarted","Data":"32e69b96b0134407ade08b73d49b79ae4c6c1be5f623b53d6527a440c605a204"} Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.674222 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.674244 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.674262 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.686653 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="49931f0be5e603c2daaa8ecf2f4e39aef9cf15176e498454b5a286b9aedf81bb" exitCode=0 Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.686703 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"49931f0be5e603c2daaa8ecf2f4e39aef9cf15176e498454b5a286b9aedf81bb"} Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.686740 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"a017a7df5b38d210396454e3d5320e7848368cef39cc3b4e52346ca0e1c69ea7"} Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.686763 4611 scope.go:117] "RemoveContainer" containerID="02c1cf1e6d77a414178e7c5de630b700218c10ae6c54f2823aa24b1d4995bcf3" Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.724895 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" podStartSLOduration=7.724870065 podStartE2EDuration="7.724870065s" podCreationTimestamp="2025-09-29 12:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:50:35.714792723 +0000 UTC m=+622.606312329" watchObservedRunningTime="2025-09-29 12:50:35.724870065 +0000 UTC m=+622.616389671" Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.732709 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:35 crc kubenswrapper[4611]: I0929 12:50:35.756216 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:50:41 crc kubenswrapper[4611]: I0929 12:50:41.736761 4611 scope.go:117] "RemoveContainer" containerID="3019c59039b6ddebfa387398ba37323b792ff4c1ac4de148cceb69288d0121fe" Sep 29 12:50:41 crc kubenswrapper[4611]: E0929 12:50:41.737332 4611 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-csch6_openshift-multus(18731b4e-6360-4d87-b586-0a9dc6b5af1e)\"" pod="openshift-multus/multus-csch6" podUID="18731b4e-6360-4d87-b586-0a9dc6b5af1e" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.391234 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2"] Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.392998 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.395597 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.413108 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2"] Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.566765 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmh8q\" (UniqueName: \"kubernetes.io/projected/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-kube-api-access-gmh8q\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.567382 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.567510 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.668543 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmh8q\" (UniqueName: \"kubernetes.io/projected/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-kube-api-access-gmh8q\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.668685 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc 
kubenswrapper[4611]: I0929 12:50:55.668715 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.669235 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.669571 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.690679 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmh8q\" (UniqueName: \"kubernetes.io/projected/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-kube-api-access-gmh8q\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.708307 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.737361 4611 scope.go:117] "RemoveContainer" containerID="3019c59039b6ddebfa387398ba37323b792ff4c1ac4de148cceb69288d0121fe" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.738713 4611 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(b2b1e12b7de1c35f48a1b516a279a32a0f9abfe45c5c517b33e78fe2c66f4027): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.738809 4611 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(b2b1e12b7de1c35f48a1b516a279a32a0f9abfe45c5c517b33e78fe2c66f4027): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.738831 4611 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(b2b1e12b7de1c35f48a1b516a279a32a0f9abfe45c5c517b33e78fe2c66f4027): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.738872 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace(93a499b8-40a0-4d68-aa6e-df5aaf7f5e21)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace(93a499b8-40a0-4d68-aa6e-df5aaf7f5e21)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(b2b1e12b7de1c35f48a1b516a279a32a0f9abfe45c5c517b33e78fe2c66f4027): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.799860 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: I0929 12:50:55.800285 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.820828 4611 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(70cadaa5945fb8098a9d45be7ec283dbc38e65fd756c4de269e5134d6d0c9684): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.820896 4611 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(70cadaa5945fb8098a9d45be7ec283dbc38e65fd756c4de269e5134d6d0c9684): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.820918 4611 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(70cadaa5945fb8098a9d45be7ec283dbc38e65fd756c4de269e5134d6d0c9684): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:50:55 crc kubenswrapper[4611]: E0929 12:50:55.820980 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace(93a499b8-40a0-4d68-aa6e-df5aaf7f5e21)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace(93a499b8-40a0-4d68-aa6e-df5aaf7f5e21)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_openshift-marketplace_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21_0(70cadaa5945fb8098a9d45be7ec283dbc38e65fd756c4de269e5134d6d0c9684): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" Sep 29 12:50:56 crc kubenswrapper[4611]: I0929 12:50:56.815883 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-csch6_18731b4e-6360-4d87-b586-0a9dc6b5af1e/kube-multus/2.log" Sep 29 12:50:56 crc kubenswrapper[4611]: I0929 12:50:56.816243 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-csch6" event={"ID":"18731b4e-6360-4d87-b586-0a9dc6b5af1e","Type":"ContainerStarted","Data":"293e53d97ee23b67648078a614f966d1eb15f84848770beadbaec16f39838520"} Sep 29 12:50:58 crc kubenswrapper[4611]: I0929 12:50:58.752802 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6s7qr" Sep 29 12:51:09 crc kubenswrapper[4611]: I0929 12:51:09.735409 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:51:09 crc kubenswrapper[4611]: I0929 12:51:09.736516 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:51:09 crc kubenswrapper[4611]: I0929 12:51:09.934972 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2"] Sep 29 12:51:09 crc kubenswrapper[4611]: W0929 12:51:09.954925 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93a499b8_40a0_4d68_aa6e_df5aaf7f5e21.slice/crio-ca8d37563aefcc9e77625898cf71622a2547f7e036414eeb30955b456c1c92df WatchSource:0}: Error finding container ca8d37563aefcc9e77625898cf71622a2547f7e036414eeb30955b456c1c92df: Status 404 returned error can't find the container with id ca8d37563aefcc9e77625898cf71622a2547f7e036414eeb30955b456c1c92df Sep 29 12:51:10 crc kubenswrapper[4611]: I0929 12:51:10.890525 4611 generic.go:334] "Generic (PLEG): container finished" podID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerID="3bc941b3413e40d9e59aefed81354880486712eae031ff94406cb0e6e547176c" exitCode=0 Sep 29 12:51:10 crc kubenswrapper[4611]: I0929 12:51:10.890643 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" event={"ID":"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21","Type":"ContainerDied","Data":"3bc941b3413e40d9e59aefed81354880486712eae031ff94406cb0e6e547176c"} Sep 29 12:51:10 crc kubenswrapper[4611]: I0929 12:51:10.890910 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" event={"ID":"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21","Type":"ContainerStarted","Data":"ca8d37563aefcc9e77625898cf71622a2547f7e036414eeb30955b456c1c92df"} Sep 29 12:51:12 crc kubenswrapper[4611]: I0929 12:51:12.903297 4611 generic.go:334] "Generic (PLEG): container finished" podID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerID="0cc557fd390cc086305e8c2bb0d0dd8e009fb8bd68f742e6049e9ceeb42218a7" exitCode=0 Sep 29 12:51:12 crc kubenswrapper[4611]: I0929 12:51:12.903374 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" event={"ID":"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21","Type":"ContainerDied","Data":"0cc557fd390cc086305e8c2bb0d0dd8e009fb8bd68f742e6049e9ceeb42218a7"} Sep 29 12:51:13 crc kubenswrapper[4611]: I0929 12:51:13.911809 4611 generic.go:334] "Generic (PLEG): container finished" podID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerID="047e0d0833374f8437575f00260e4751608a418001348e7a68c1a6321528bd1c" exitCode=0 Sep 29 12:51:13 crc kubenswrapper[4611]: I0929 12:51:13.912238 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" event={"ID":"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21","Type":"ContainerDied","Data":"047e0d0833374f8437575f00260e4751608a418001348e7a68c1a6321528bd1c"} Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.138886 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.322270 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-util\") pod \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.322350 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmh8q\" (UniqueName: \"kubernetes.io/projected/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-kube-api-access-gmh8q\") pod \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.322465 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-bundle\") pod \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\" (UID: \"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21\") " Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.324717 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-bundle" (OuterVolumeSpecName: "bundle") pod "93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" (UID: "93a499b8-40a0-4d68-aa6e-df5aaf7f5e21"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.333073 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-kube-api-access-gmh8q" (OuterVolumeSpecName: "kube-api-access-gmh8q") pod "93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" (UID: "93a499b8-40a0-4d68-aa6e-df5aaf7f5e21"). InnerVolumeSpecName "kube-api-access-gmh8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.424569 4611 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.424602 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmh8q\" (UniqueName: \"kubernetes.io/projected/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-kube-api-access-gmh8q\") on node \"crc\" DevicePath \"\"" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.733285 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-util" (OuterVolumeSpecName: "util") pod "93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" (UID: "93a499b8-40a0-4d68-aa6e-df5aaf7f5e21"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.829107 4611 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93a499b8-40a0-4d68-aa6e-df5aaf7f5e21-util\") on node \"crc\" DevicePath \"\"" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.924439 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" event={"ID":"93a499b8-40a0-4d68-aa6e-df5aaf7f5e21","Type":"ContainerDied","Data":"ca8d37563aefcc9e77625898cf71622a2547f7e036414eeb30955b456c1c92df"} Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.924497 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca8d37563aefcc9e77625898cf71622a2547f7e036414eeb30955b456c1c92df" Sep 29 12:51:15 crc kubenswrapper[4611]: I0929 12:51:15.924561 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.070387 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b"] Sep 29 12:51:29 crc kubenswrapper[4611]: E0929 12:51:29.071150 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="util" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.071161 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="util" Sep 29 12:51:29 crc kubenswrapper[4611]: E0929 12:51:29.071173 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="extract" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.071178 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="extract" Sep 29 12:51:29 crc kubenswrapper[4611]: E0929 12:51:29.071198 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="pull" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.071204 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="pull" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.071311 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="93a499b8-40a0-4d68-aa6e-df5aaf7f5e21" containerName="extract" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.071707 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.081571 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.081649 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.086349 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-qxmkt" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.096097 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.187239 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.187428 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckbd9\" (UniqueName: \"kubernetes.io/projected/00915c37-bca5-4549-952b-ed9a40de0aa7-kube-api-access-ckbd9\") pod \"obo-prometheus-operator-7c8cf85677-wqk5b\" (UID: \"00915c37-bca5-4549-952b-ed9a40de0aa7\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.188042 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.198166 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.201472 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.204370 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-f5hlt" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.207261 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.207987 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.233300 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.288587 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/80990670-7269-474f-8fe7-dfdb646689b4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv\" (UID: \"80990670-7269-474f-8fe7-dfdb646689b4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.288671 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bedf6678-a16d-4714-8f65-b865c1bc9b16-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx\" (UID: \"bedf6678-a16d-4714-8f65-b865c1bc9b16\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.288703 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/80990670-7269-474f-8fe7-dfdb646689b4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv\" (UID: \"80990670-7269-474f-8fe7-dfdb646689b4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.288962 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckbd9\" (UniqueName: \"kubernetes.io/projected/00915c37-bca5-4549-952b-ed9a40de0aa7-kube-api-access-ckbd9\") pod \"obo-prometheus-operator-7c8cf85677-wqk5b\" (UID: \"00915c37-bca5-4549-952b-ed9a40de0aa7\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.289035 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bedf6678-a16d-4714-8f65-b865c1bc9b16-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx\" (UID: \"bedf6678-a16d-4714-8f65-b865c1bc9b16\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.309566 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckbd9\" (UniqueName: \"kubernetes.io/projected/00915c37-bca5-4549-952b-ed9a40de0aa7-kube-api-access-ckbd9\") pod \"obo-prometheus-operator-7c8cf85677-wqk5b\" (UID: \"00915c37-bca5-4549-952b-ed9a40de0aa7\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.382263 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-9k89j"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.383222 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.387051 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-qgxk2" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.387675 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.390767 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bedf6678-a16d-4714-8f65-b865c1bc9b16-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx\" (UID: \"bedf6678-a16d-4714-8f65-b865c1bc9b16\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.391886 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/80990670-7269-474f-8fe7-dfdb646689b4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv\" (UID: \"80990670-7269-474f-8fe7-dfdb646689b4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.392182 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bedf6678-a16d-4714-8f65-b865c1bc9b16-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx\" (UID: \"bedf6678-a16d-4714-8f65-b865c1bc9b16\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.392259 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/80990670-7269-474f-8fe7-dfdb646689b4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv\" (UID: \"80990670-7269-474f-8fe7-dfdb646689b4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.391462 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.395228 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bedf6678-a16d-4714-8f65-b865c1bc9b16-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx\" (UID: \"bedf6678-a16d-4714-8f65-b865c1bc9b16\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.397875 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bedf6678-a16d-4714-8f65-b865c1bc9b16-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx\" (UID: \"bedf6678-a16d-4714-8f65-b865c1bc9b16\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.398548 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/80990670-7269-474f-8fe7-dfdb646689b4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv\" (UID: \"80990670-7269-474f-8fe7-dfdb646689b4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.412852 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-9k89j"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.424283 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/80990670-7269-474f-8fe7-dfdb646689b4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv\" (UID: \"80990670-7269-474f-8fe7-dfdb646689b4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.493606 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppjck\" (UniqueName: \"kubernetes.io/projected/9d8f225d-549f-436f-98d9-9dc43b350002-kube-api-access-ppjck\") pod \"observability-operator-cc5f78dfc-9k89j\" (UID: \"9d8f225d-549f-436f-98d9-9dc43b350002\") " pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.493679 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/9d8f225d-549f-436f-98d9-9dc43b350002-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-9k89j\" (UID: \"9d8f225d-549f-436f-98d9-9dc43b350002\") " pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.506544 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.527276 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.577868 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-4jn8b"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.578643 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.582933 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-whh5b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.594838 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppjck\" (UniqueName: \"kubernetes.io/projected/9d8f225d-549f-436f-98d9-9dc43b350002-kube-api-access-ppjck\") pod \"observability-operator-cc5f78dfc-9k89j\" (UID: \"9d8f225d-549f-436f-98d9-9dc43b350002\") " pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.594893 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/9d8f225d-549f-436f-98d9-9dc43b350002-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-9k89j\" (UID: \"9d8f225d-549f-436f-98d9-9dc43b350002\") " pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.598267 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-4jn8b"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.605544 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/9d8f225d-549f-436f-98d9-9dc43b350002-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-9k89j\" (UID: \"9d8f225d-549f-436f-98d9-9dc43b350002\") " pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.638264 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppjck\" (UniqueName: \"kubernetes.io/projected/9d8f225d-549f-436f-98d9-9dc43b350002-kube-api-access-ppjck\") pod \"observability-operator-cc5f78dfc-9k89j\" (UID: \"9d8f225d-549f-436f-98d9-9dc43b350002\") " pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.696426 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a0f95584-7024-474f-a6d5-5d4a409e4db4-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-4jn8b\" (UID: \"a0f95584-7024-474f-a6d5-5d4a409e4db4\") " pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.696483 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q2dm\" (UniqueName: \"kubernetes.io/projected/a0f95584-7024-474f-a6d5-5d4a409e4db4-kube-api-access-8q2dm\") pod \"perses-operator-54bc95c9fb-4jn8b\" (UID: \"a0f95584-7024-474f-a6d5-5d4a409e4db4\") " pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.759617 4611 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.797672 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q2dm\" (UniqueName: \"kubernetes.io/projected/a0f95584-7024-474f-a6d5-5d4a409e4db4-kube-api-access-8q2dm\") pod \"perses-operator-54bc95c9fb-4jn8b\" (UID: \"a0f95584-7024-474f-a6d5-5d4a409e4db4\") " pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.797758 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a0f95584-7024-474f-a6d5-5d4a409e4db4-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-4jn8b\" (UID: \"a0f95584-7024-474f-a6d5-5d4a409e4db4\") " pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.798576 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a0f95584-7024-474f-a6d5-5d4a409e4db4-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-4jn8b\" (UID: \"a0f95584-7024-474f-a6d5-5d4a409e4db4\") " pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.825303 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q2dm\" (UniqueName: \"kubernetes.io/projected/a0f95584-7024-474f-a6d5-5d4a409e4db4-kube-api-access-8q2dm\") pod \"perses-operator-54bc95c9fb-4jn8b\" (UID: \"a0f95584-7024-474f-a6d5-5d4a409e4db4\") " pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.920824 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b"] Sep 29 12:51:29 crc kubenswrapper[4611]: I0929 12:51:29.931302 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" Sep 29 12:51:30 crc kubenswrapper[4611]: I0929 12:51:30.025796 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" event={"ID":"00915c37-bca5-4549-952b-ed9a40de0aa7","Type":"ContainerStarted","Data":"75380465de9358070d1df6be9dd59dc73bc5ab08721fcbf3598580f1292cb219"} Sep 29 12:51:30 crc kubenswrapper[4611]: I0929 12:51:30.162553 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx"] Sep 29 12:51:30 crc kubenswrapper[4611]: W0929 12:51:30.170613 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbedf6678_a16d_4714_8f65_b865c1bc9b16.slice/crio-442bbd9ef10c9aad90db99e2b2fd02a5f6d17558a9ff680d857c0ef91b45a6eb WatchSource:0}: Error finding container 442bbd9ef10c9aad90db99e2b2fd02a5f6d17558a9ff680d857c0ef91b45a6eb: Status 404 returned error can't find the container with id 442bbd9ef10c9aad90db99e2b2fd02a5f6d17558a9ff680d857c0ef91b45a6eb Sep 29 12:51:30 crc kubenswrapper[4611]: I0929 12:51:30.328546 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv"] Sep 29 12:51:30 crc kubenswrapper[4611]: I0929 12:51:30.449309 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-9k89j"] Sep 29 12:51:30 crc kubenswrapper[4611]: W0929 12:51:30.463719 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d8f225d_549f_436f_98d9_9dc43b350002.slice/crio-31be21e9bf4ee0a40533b8207c46f941ecdc5b2182880491eee0024a54380b07 WatchSource:0}: Error finding container 31be21e9bf4ee0a40533b8207c46f941ecdc5b2182880491eee0024a54380b07: Status 404 returned error can't find the container with id 31be21e9bf4ee0a40533b8207c46f941ecdc5b2182880491eee0024a54380b07 Sep 29 12:51:30 crc kubenswrapper[4611]: I0929 12:51:30.530004 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-4jn8b"] Sep 29 12:51:31 crc kubenswrapper[4611]: I0929 12:51:31.031541 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" event={"ID":"a0f95584-7024-474f-a6d5-5d4a409e4db4","Type":"ContainerStarted","Data":"242b2709db635a72de9bf227f36e55c24e7e681de03be7136b4905aa3703b39b"} Sep 29 12:51:31 crc kubenswrapper[4611]: I0929 12:51:31.033270 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" event={"ID":"bedf6678-a16d-4714-8f65-b865c1bc9b16","Type":"ContainerStarted","Data":"442bbd9ef10c9aad90db99e2b2fd02a5f6d17558a9ff680d857c0ef91b45a6eb"} Sep 29 12:51:31 crc kubenswrapper[4611]: I0929 12:51:31.034406 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" event={"ID":"9d8f225d-549f-436f-98d9-9dc43b350002","Type":"ContainerStarted","Data":"31be21e9bf4ee0a40533b8207c46f941ecdc5b2182880491eee0024a54380b07"} Sep 29 12:51:31 crc kubenswrapper[4611]: I0929 12:51:31.035615 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" 
event={"ID":"80990670-7269-474f-8fe7-dfdb646689b4","Type":"ContainerStarted","Data":"b7a4c4575613da56db8c835ec87b2a27febeda7d42711d902862e9a3a28054ab"} Sep 29 12:51:50 crc kubenswrapper[4611]: E0929 12:51:50.426171 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c" Sep 29 12:51:50 crc kubenswrapper[4611]: E0929 12:51:50.426984 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8q2dm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-54bc95c9fb-4jn8b_openshift-operators(a0f95584-7024-474f-a6d5-5d4a409e4db4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 12:51:50 crc kubenswrapper[4611]: E0929 12:51:50.430953 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" podUID="a0f95584-7024-474f-a6d5-5d4a409e4db4" Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.253399 
4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" event={"ID":"bedf6678-a16d-4714-8f65-b865c1bc9b16","Type":"ContainerStarted","Data":"a4299afd1b9db516c0c1d41e532aca10ad73377d233570d18ff45d0db73f4dc2"} Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.255564 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" event={"ID":"9d8f225d-549f-436f-98d9-9dc43b350002","Type":"ContainerStarted","Data":"2c30cfdcbc89b884fdcf19dff82ff88ecd45feba8e3bdfcf2facf1d98be029d4"} Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.256311 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.258537 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" event={"ID":"00915c37-bca5-4549-952b-ed9a40de0aa7","Type":"ContainerStarted","Data":"1cf7de17aaa6e93bc6cdc8039bcc06cc519233a8825dc731061bd37449ff0bbb"} Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.260187 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" event={"ID":"80990670-7269-474f-8fe7-dfdb646689b4","Type":"ContainerStarted","Data":"4347e5b88fb058ec4129b566053c5d97e2f26f48facc863bf5421678b3751ac7"} Sep 29 12:51:51 crc kubenswrapper[4611]: E0929 12:51:51.261264 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c\\\"\"" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" podUID="a0f95584-7024-474f-a6d5-5d4a409e4db4" Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.348558 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx" podStartSLOduration=2.077510493 podStartE2EDuration="22.348538416s" podCreationTimestamp="2025-09-29 12:51:29 +0000 UTC" firstStartedPulling="2025-09-29 12:51:30.179795996 +0000 UTC m=+677.071315602" lastFinishedPulling="2025-09-29 12:51:50.450823919 +0000 UTC m=+697.342343525" observedRunningTime="2025-09-29 12:51:51.277144357 +0000 UTC m=+698.168663983" watchObservedRunningTime="2025-09-29 12:51:51.348538416 +0000 UTC m=+698.240058022" Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.362379 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.368184 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv" podStartSLOduration=2.223242957 podStartE2EDuration="22.368169162s" podCreationTimestamp="2025-09-29 12:51:29 +0000 UTC" firstStartedPulling="2025-09-29 12:51:30.343470868 +0000 UTC m=+677.234990474" lastFinishedPulling="2025-09-29 12:51:50.488397073 +0000 UTC m=+697.379916679" observedRunningTime="2025-09-29 12:51:51.367102011 +0000 UTC m=+698.258621617" watchObservedRunningTime="2025-09-29 12:51:51.368169162 +0000 UTC m=+698.259688768" Sep 29 12:51:51 crc 
Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.439672 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-9k89j" podStartSLOduration=2.398224815 podStartE2EDuration="22.439653434s" podCreationTimestamp="2025-09-29 12:51:29 +0000 UTC" firstStartedPulling="2025-09-29 12:51:30.474365344 +0000 UTC m=+677.365884950" lastFinishedPulling="2025-09-29 12:51:50.515793963 +0000 UTC m=+697.407313569" observedRunningTime="2025-09-29 12:51:51.435638629 +0000 UTC m=+698.327158235" watchObservedRunningTime="2025-09-29 12:51:51.439653434 +0000 UTC m=+698.331173040"
Sep 29 12:51:51 crc kubenswrapper[4611]: I0929 12:51:51.440166 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-wqk5b" podStartSLOduration=1.948271875 podStartE2EDuration="22.440158319s" podCreationTimestamp="2025-09-29 12:51:29 +0000 UTC" firstStartedPulling="2025-09-29 12:51:29.958965866 +0000 UTC m=+676.850485472" lastFinishedPulling="2025-09-29 12:51:50.45085231 +0000 UTC m=+697.342371916" observedRunningTime="2025-09-29 12:51:51.397186399 +0000 UTC m=+698.288706025" watchObservedRunningTime="2025-09-29 12:51:51.440158319 +0000 UTC m=+698.331677925"
Sep 29 12:52:09 crc kubenswrapper[4611]: I0929 12:52:09.368921 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" event={"ID":"a0f95584-7024-474f-a6d5-5d4a409e4db4","Type":"ContainerStarted","Data":"ce5c425380e91917040dd64bec4c41dbb7630612f9d343dba2036b2aaf4560d8"}
Sep 29 12:52:09 crc kubenswrapper[4611]: I0929 12:52:09.369729 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b"
Sep 29 12:52:09 crc kubenswrapper[4611]: I0929 12:52:09.391721 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b" podStartSLOduration=2.615864936 podStartE2EDuration="40.391688658s" podCreationTimestamp="2025-09-29 12:51:29 +0000 UTC" firstStartedPulling="2025-09-29 12:51:30.5425129 +0000 UTC m=+677.434032506" lastFinishedPulling="2025-09-29 12:52:08.318336622 +0000 UTC m=+715.209856228" observedRunningTime="2025-09-29 12:52:09.388208598 +0000 UTC m=+716.279728224" watchObservedRunningTime="2025-09-29 12:52:09.391688658 +0000 UTC m=+716.283208264"
Sep 29 12:52:19 crc kubenswrapper[4611]: I0929 12:52:19.935055 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-4jn8b"
Sep 29 12:52:34 crc kubenswrapper[4611]: I0929 12:52:34.629193 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 12:52:34 crc kubenswrapper[4611]: I0929 12:52:34.630657 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
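
The five "Observed pod startup duration" entries above are internally consistent: podStartSLOduration appears to be podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling), so time spent pulling images is excluded from the SLO figure; later entries whose pull fields are the zero time ("0001-01-01 00:00:00 +0000 UTC") report the two durations as equal because no pull was needed. A small check against the ...-7scqx entry, with its timestamps copied verbatim (the field relationship is inferred from these entries, not from kubelet documentation):

// slo_duration_check.go: verifies podStartSLOduration =
// podStartE2EDuration - (lastFinishedPulling - firstStartedPulling)
// using values from the 7scqx webhook pod entry above.
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	// Parse errors elided for brevity; inputs are fixed literals.
	first, _ := time.Parse(layout, "2025-09-29 12:51:30.179795996 +0000 UTC")
	last, _ := time.Parse(layout, "2025-09-29 12:51:50.450823919 +0000 UTC")
	e2e, _ := time.ParseDuration("22.348538416s") // podStartE2EDuration
	slo := e2e - last.Sub(first)                  // subtract the pull window
	fmt.Println(slo) // 2.077510493s, matching podStartSLOduration=2.077510493
}

Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.024380 4611 kubelet.go:2421] "SyncLoop ADD" source="api"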
pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h"] Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.025949 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.039409 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h"] Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.043216 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.147507 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.147582 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.147645 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n89hl\" (UniqueName: \"kubernetes.io/projected/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-kube-api-access-n89hl\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.248436 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.248490 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.248545 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n89hl\" (UniqueName: \"kubernetes.io/projected/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-kube-api-access-n89hl\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " 
pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.248903 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.248944 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.271774 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n89hl\" (UniqueName: \"kubernetes.io/projected/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-kube-api-access-n89hl\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.342990 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:37 crc kubenswrapper[4611]: I0929 12:52:37.565060 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h"] Sep 29 12:52:38 crc kubenswrapper[4611]: I0929 12:52:38.522780 4611 generic.go:334] "Generic (PLEG): container finished" podID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerID="19a2d92859d4dbfecb0e29298787f219caaf94afc85fbb0a64dfe443f11cd6e2" exitCode=0 Sep 29 12:52:38 crc kubenswrapper[4611]: I0929 12:52:38.522885 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" event={"ID":"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807","Type":"ContainerDied","Data":"19a2d92859d4dbfecb0e29298787f219caaf94afc85fbb0a64dfe443f11cd6e2"} Sep 29 12:52:38 crc kubenswrapper[4611]: I0929 12:52:38.523067 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" event={"ID":"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807","Type":"ContainerStarted","Data":"53c9576fb75d2cd8f6d99de35f458a165bfa4952a586469c5c02b8739a794238"} Sep 29 12:52:40 crc kubenswrapper[4611]: I0929 12:52:40.534158 4611 generic.go:334] "Generic (PLEG): container finished" podID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerID="4a43ecc59952a6c0b9cd573c71d977564af800f964954e3b3c249e27ca2eee8f" exitCode=0 Sep 29 12:52:40 crc kubenswrapper[4611]: I0929 12:52:40.534494 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" event={"ID":"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807","Type":"ContainerDied","Data":"4a43ecc59952a6c0b9cd573c71d977564af800f964954e3b3c249e27ca2eee8f"} Sep 29 12:52:41 crc kubenswrapper[4611]: I0929 
12:52:41.543506 4611 generic.go:334] "Generic (PLEG): container finished" podID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerID="ed37d388efde503adcec76836d8a79d09aaa7fd99c3572599685d034b10193d2" exitCode=0 Sep 29 12:52:41 crc kubenswrapper[4611]: I0929 12:52:41.543586 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" event={"ID":"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807","Type":"ContainerDied","Data":"ed37d388efde503adcec76836d8a79d09aaa7fd99c3572599685d034b10193d2"} Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.760793 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.821357 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n89hl\" (UniqueName: \"kubernetes.io/projected/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-kube-api-access-n89hl\") pod \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.821449 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-util\") pod \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.821486 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-bundle\") pod \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\" (UID: \"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807\") " Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.822785 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-bundle" (OuterVolumeSpecName: "bundle") pod "6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" (UID: "6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.829783 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-kube-api-access-n89hl" (OuterVolumeSpecName: "kube-api-access-n89hl") pod "6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" (UID: "6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807"). InnerVolumeSpecName "kube-api-access-n89hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.835434 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-util" (OuterVolumeSpecName: "util") pod "6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" (UID: "6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.923864 4611 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-util\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.923902 4611 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:42 crc kubenswrapper[4611]: I0929 12:52:42.923912 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n89hl\" (UniqueName: \"kubernetes.io/projected/6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807-kube-api-access-n89hl\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:43 crc kubenswrapper[4611]: I0929 12:52:43.556706 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" event={"ID":"6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807","Type":"ContainerDied","Data":"53c9576fb75d2cd8f6d99de35f458a165bfa4952a586469c5c02b8739a794238"} Sep 29 12:52:43 crc kubenswrapper[4611]: I0929 12:52:43.556945 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53c9576fb75d2cd8f6d99de35f458a165bfa4952a586469c5c02b8739a794238" Sep 29 12:52:43 crc kubenswrapper[4611]: I0929 12:52:43.556791 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.541291 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hml26"] Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.541903 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" podUID="4348f944-d51c-4fdc-8789-646958d61216" containerName="controller-manager" containerID="cri-o://ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470" gracePeriod=30 Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.656531 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"] Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.656773 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" podUID="1387ce40-000f-4e95-adda-a347b7574779" containerName="route-controller-manager" containerID="cri-o://9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1" gracePeriod=30 Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.949419 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66"] Sep 29 12:52:45 crc kubenswrapper[4611]: E0929 12:52:45.949897 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="pull" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.949907 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="pull" Sep 29 12:52:45 crc kubenswrapper[4611]: E0929 12:52:45.949915 4611 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="util" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.949920 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="util" Sep 29 12:52:45 crc kubenswrapper[4611]: E0929 12:52:45.949929 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="extract" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.949935 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="extract" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.950029 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807" containerName="extract" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.950407 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.961106 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.961502 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-g6rrs" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.961684 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 12:52:45 crc kubenswrapper[4611]: I0929 12:52:45.988828 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66"] Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.062866 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxqbq\" (UniqueName: \"kubernetes.io/projected/aeb9e2d5-fee2-4175-bca2-0b41f8955b5e-kube-api-access-nxqbq\") pod \"nmstate-operator-5d6f6cfd66-rmq66\" (UID: \"aeb9e2d5-fee2-4175-bca2-0b41f8955b5e\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.140648 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.164486 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxqbq\" (UniqueName: \"kubernetes.io/projected/aeb9e2d5-fee2-4175-bca2-0b41f8955b5e-kube-api-access-nxqbq\") pod \"nmstate-operator-5d6f6cfd66-rmq66\" (UID: \"aeb9e2d5-fee2-4175-bca2-0b41f8955b5e\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.170327 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.190655 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxqbq\" (UniqueName: \"kubernetes.io/projected/aeb9e2d5-fee2-4175-bca2-0b41f8955b5e-kube-api-access-nxqbq\") pod \"nmstate-operator-5d6f6cfd66-rmq66\" (UID: \"aeb9e2d5-fee2-4175-bca2-0b41f8955b5e\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265040 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6blsv\" (UniqueName: \"kubernetes.io/projected/4348f944-d51c-4fdc-8789-646958d61216-kube-api-access-6blsv\") pod \"4348f944-d51c-4fdc-8789-646958d61216\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265085 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1387ce40-000f-4e95-adda-a347b7574779-serving-cert\") pod \"1387ce40-000f-4e95-adda-a347b7574779\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265112 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-config\") pod \"4348f944-d51c-4fdc-8789-646958d61216\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265165 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml27m\" (UniqueName: \"kubernetes.io/projected/1387ce40-000f-4e95-adda-a347b7574779-kube-api-access-ml27m\") pod \"1387ce40-000f-4e95-adda-a347b7574779\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265192 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4348f944-d51c-4fdc-8789-646958d61216-serving-cert\") pod \"4348f944-d51c-4fdc-8789-646958d61216\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265222 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-client-ca\") pod \"4348f944-d51c-4fdc-8789-646958d61216\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265238 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-proxy-ca-bundles\") pod \"4348f944-d51c-4fdc-8789-646958d61216\" (UID: \"4348f944-d51c-4fdc-8789-646958d61216\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265270 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-client-ca\") pod \"1387ce40-000f-4e95-adda-a347b7574779\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.265288 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-config\") pod \"1387ce40-000f-4e95-adda-a347b7574779\" (UID: \"1387ce40-000f-4e95-adda-a347b7574779\") " Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.266167 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-config" (OuterVolumeSpecName: "config") pod "1387ce40-000f-4e95-adda-a347b7574779" (UID: "1387ce40-000f-4e95-adda-a347b7574779"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.266456 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-client-ca" (OuterVolumeSpecName: "client-ca") pod "1387ce40-000f-4e95-adda-a347b7574779" (UID: "1387ce40-000f-4e95-adda-a347b7574779"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.266586 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4348f944-d51c-4fdc-8789-646958d61216" (UID: "4348f944-d51c-4fdc-8789-646958d61216"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.266728 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-client-ca" (OuterVolumeSpecName: "client-ca") pod "4348f944-d51c-4fdc-8789-646958d61216" (UID: "4348f944-d51c-4fdc-8789-646958d61216"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.266818 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-config" (OuterVolumeSpecName: "config") pod "4348f944-d51c-4fdc-8789-646958d61216" (UID: "4348f944-d51c-4fdc-8789-646958d61216"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.268187 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4348f944-d51c-4fdc-8789-646958d61216-kube-api-access-6blsv" (OuterVolumeSpecName: "kube-api-access-6blsv") pod "4348f944-d51c-4fdc-8789-646958d61216" (UID: "4348f944-d51c-4fdc-8789-646958d61216"). InnerVolumeSpecName "kube-api-access-6blsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.269043 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1387ce40-000f-4e95-adda-a347b7574779-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1387ce40-000f-4e95-adda-a347b7574779" (UID: "1387ce40-000f-4e95-adda-a347b7574779"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.269088 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4348f944-d51c-4fdc-8789-646958d61216-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4348f944-d51c-4fdc-8789-646958d61216" (UID: "4348f944-d51c-4fdc-8789-646958d61216"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.269284 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1387ce40-000f-4e95-adda-a347b7574779-kube-api-access-ml27m" (OuterVolumeSpecName: "kube-api-access-ml27m") pod "1387ce40-000f-4e95-adda-a347b7574779" (UID: "1387ce40-000f-4e95-adda-a347b7574779"). InnerVolumeSpecName "kube-api-access-ml27m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.302578 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.366851 4611 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367162 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1387ce40-000f-4e95-adda-a347b7574779-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367240 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6blsv\" (UniqueName: \"kubernetes.io/projected/4348f944-d51c-4fdc-8789-646958d61216-kube-api-access-6blsv\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367309 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1387ce40-000f-4e95-adda-a347b7574779-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367391 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367465 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml27m\" (UniqueName: \"kubernetes.io/projected/1387ce40-000f-4e95-adda-a347b7574779-kube-api-access-ml27m\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367536 4611 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4348f944-d51c-4fdc-8789-646958d61216-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367613 4611 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.367709 4611 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4348f944-d51c-4fdc-8789-646958d61216-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.579245 4611 generic.go:334] "Generic (PLEG): container finished" podID="1387ce40-000f-4e95-adda-a347b7574779" containerID="9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1" exitCode=0 Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.579324 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.579375 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" event={"ID":"1387ce40-000f-4e95-adda-a347b7574779","Type":"ContainerDied","Data":"9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1"} Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.585700 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l" event={"ID":"1387ce40-000f-4e95-adda-a347b7574779","Type":"ContainerDied","Data":"6f488137c416ba8974e9b5a6aa919809649a580fae90a9d7d82cd8606d975def"} Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.585752 4611 scope.go:117] "RemoveContainer" containerID="9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.592056 4611 generic.go:334] "Generic (PLEG): container finished" podID="4348f944-d51c-4fdc-8789-646958d61216" containerID="ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470" exitCode=0 Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.592093 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" event={"ID":"4348f944-d51c-4fdc-8789-646958d61216","Type":"ContainerDied","Data":"ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470"} Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.592117 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" event={"ID":"4348f944-d51c-4fdc-8789-646958d61216","Type":"ContainerDied","Data":"a7a642f434a8107a4dc4b9925abe810351bcf2189f691d38dd599ed8b7b00729"} Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.592164 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hml26" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.608018 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66"] Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.629551 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"] Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.630503 4611 scope.go:117] "RemoveContainer" containerID="9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1" Sep 29 12:52:46 crc kubenswrapper[4611]: E0929 12:52:46.631504 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1\": container with ID starting with 9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1 not found: ID does not exist" containerID="9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.631610 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1"} err="failed to get container status \"9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1\": rpc error: code = NotFound desc = could not find container \"9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1\": container with ID starting with 9d7fa3eb70dedd920202b6d70b540d34582aab1c7f6a470bb028e2ff99eff8a1 not found: ID does not exist" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.631717 4611 scope.go:117] "RemoveContainer" containerID="ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.634653 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qf59l"] Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.659447 4611 scope.go:117] "RemoveContainer" containerID="ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470" Sep 29 12:52:46 crc kubenswrapper[4611]: E0929 12:52:46.661073 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470\": container with ID starting with ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470 not found: ID does not exist" containerID="ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.661359 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470"} err="failed to get container status \"ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470\": rpc error: code = NotFound desc = could not find container \"ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470\": container with ID starting with ff949e1c63c914544cfbaa5675ce256307e2a074776b85f28fd350fb23db9470 not found: ID does not exist" Sep 29 12:52:46 crc kubenswrapper[4611]: I0929 12:52:46.688331 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hml26"] Sep 29 12:52:46 crc 
kubenswrapper[4611]: I0929 12:52:46.706106 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hml26"] Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.459953 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-69ff455cb-r9527"] Sep 29 12:52:47 crc kubenswrapper[4611]: E0929 12:52:47.460174 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1387ce40-000f-4e95-adda-a347b7574779" containerName="route-controller-manager" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.460189 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1387ce40-000f-4e95-adda-a347b7574779" containerName="route-controller-manager" Sep 29 12:52:47 crc kubenswrapper[4611]: E0929 12:52:47.460204 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4348f944-d51c-4fdc-8789-646958d61216" containerName="controller-manager" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.460211 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4348f944-d51c-4fdc-8789-646958d61216" containerName="controller-manager" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.460312 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1387ce40-000f-4e95-adda-a347b7574779" containerName="route-controller-manager" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.460323 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="4348f944-d51c-4fdc-8789-646958d61216" containerName="controller-manager" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.460821 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.464577 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds"] Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.465256 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.466288 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.466502 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.467086 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.467594 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.467807 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.469684 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.469947 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.470100 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.470505 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.471888 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.472069 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.472344 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.490974 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69ff455cb-r9527"] Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.494981 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds"] Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.507073 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587552 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36e88c4e-e8e3-46c5-bea6-04554ced2a93-serving-cert\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587652 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-serving-cert\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587693 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwgdv\" (UniqueName: \"kubernetes.io/projected/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-kube-api-access-rwgdv\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587746 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/36e88c4e-e8e3-46c5-bea6-04554ced2a93-client-ca\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587792 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-proxy-ca-bundles\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587834 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36e88c4e-e8e3-46c5-bea6-04554ced2a93-config\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587860 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9rtl\" (UniqueName: \"kubernetes.io/projected/36e88c4e-e8e3-46c5-bea6-04554ced2a93-kube-api-access-l9rtl\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587886 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-client-ca\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.587925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-config\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.600740 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" event={"ID":"aeb9e2d5-fee2-4175-bca2-0b41f8955b5e","Type":"ContainerStarted","Data":"1124e756f5ade1b4d25a92ab38a27b90e9db57670586bf9b9079ba7a3ecd3748"} Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689031 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-proxy-ca-bundles\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689422 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36e88c4e-e8e3-46c5-bea6-04554ced2a93-config\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689452 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9rtl\" (UniqueName: \"kubernetes.io/projected/36e88c4e-e8e3-46c5-bea6-04554ced2a93-kube-api-access-l9rtl\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689475 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-client-ca\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689514 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-config\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689549 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36e88c4e-e8e3-46c5-bea6-04554ced2a93-serving-cert\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689586 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-serving-cert\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689613 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwgdv\" (UniqueName: \"kubernetes.io/projected/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-kube-api-access-rwgdv\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " 
pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.689650 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/36e88c4e-e8e3-46c5-bea6-04554ced2a93-client-ca\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.690553 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-proxy-ca-bundles\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.690867 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36e88c4e-e8e3-46c5-bea6-04554ced2a93-config\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.690963 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/36e88c4e-e8e3-46c5-bea6-04554ced2a93-client-ca\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.691037 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-client-ca\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.692383 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-config\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.697115 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-serving-cert\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.697122 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36e88c4e-e8e3-46c5-bea6-04554ced2a93-serving-cert\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.708255 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwgdv\" 
(UniqueName: \"kubernetes.io/projected/a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6-kube-api-access-rwgdv\") pod \"controller-manager-69ff455cb-r9527\" (UID: \"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6\") " pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.718462 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9rtl\" (UniqueName: \"kubernetes.io/projected/36e88c4e-e8e3-46c5-bea6-04554ced2a93-kube-api-access-l9rtl\") pod \"route-controller-manager-5cd759bd69-6bbds\" (UID: \"36e88c4e-e8e3-46c5-bea6-04554ced2a93\") " pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.744225 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1387ce40-000f-4e95-adda-a347b7574779" path="/var/lib/kubelet/pods/1387ce40-000f-4e95-adda-a347b7574779/volumes" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.744993 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4348f944-d51c-4fdc-8789-646958d61216" path="/var/lib/kubelet/pods/4348f944-d51c-4fdc-8789-646958d61216/volumes" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.780662 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:47 crc kubenswrapper[4611]: I0929 12:52:47.830109 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.076834 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69ff455cb-r9527"] Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.158107 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds"] Sep 29 12:52:48 crc kubenswrapper[4611]: W0929 12:52:48.197455 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36e88c4e_e8e3_46c5_bea6_04554ced2a93.slice/crio-22da146a9aebfd7b827d30b084fa5ccade785f8eeaa08a0dce32a8d5eecebcc1 WatchSource:0}: Error finding container 22da146a9aebfd7b827d30b084fa5ccade785f8eeaa08a0dce32a8d5eecebcc1: Status 404 returned error can't find the container with id 22da146a9aebfd7b827d30b084fa5ccade785f8eeaa08a0dce32a8d5eecebcc1 Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.613193 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" event={"ID":"36e88c4e-e8e3-46c5-bea6-04554ced2a93","Type":"ContainerStarted","Data":"ddb7f4ea286fc8cf467ee1d2e8c5648f6fa73752703d72f104bbcf048d0725c3"} Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.613240 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" event={"ID":"36e88c4e-e8e3-46c5-bea6-04554ced2a93","Type":"ContainerStarted","Data":"22da146a9aebfd7b827d30b084fa5ccade785f8eeaa08a0dce32a8d5eecebcc1"} Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.613582 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.614827 
4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" event={"ID":"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6","Type":"ContainerStarted","Data":"b51d7c955a6c5bcb714a66c4e2f0bf5c71650aa1031030bde29bbec44f7c2ad4"} Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.614872 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" event={"ID":"a2e6f9b6-ef92-4401-b4ea-4f03709d9ba6","Type":"ContainerStarted","Data":"e6fa9ef0c4ffcb951778b5ca2df0a1f91c79faa7c5f47b8386cbc6f062ff22b4"} Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.615766 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.635248 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" podStartSLOduration=3.635225396 podStartE2EDuration="3.635225396s" podCreationTimestamp="2025-09-29 12:52:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:52:48.631981682 +0000 UTC m=+755.523501328" watchObservedRunningTime="2025-09-29 12:52:48.635225396 +0000 UTC m=+755.526745002" Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.659933 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.676803 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-69ff455cb-r9527" podStartSLOduration=3.676784037 podStartE2EDuration="3.676784037s" podCreationTimestamp="2025-09-29 12:52:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:52:48.67516593 +0000 UTC m=+755.566685536" watchObservedRunningTime="2025-09-29 12:52:48.676784037 +0000 UTC m=+755.568303643" Sep 29 12:52:48 crc kubenswrapper[4611]: I0929 12:52:48.974097 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5cd759bd69-6bbds" Sep 29 12:52:50 crc kubenswrapper[4611]: I0929 12:52:50.414705 4611 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 12:52:50 crc kubenswrapper[4611]: I0929 12:52:50.628868 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" event={"ID":"aeb9e2d5-fee2-4175-bca2-0b41f8955b5e","Type":"ContainerStarted","Data":"8af60a8895d4ae165cba91c3ca23e7f9959b4f1c1d619ab1bf4c4bd0a9758658"} Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.741971 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rmq66" podStartSLOduration=3.6505586709999998 podStartE2EDuration="6.741953932s" podCreationTimestamp="2025-09-29 12:52:45 +0000 UTC" firstStartedPulling="2025-09-29 12:52:46.662571294 +0000 UTC m=+753.554090900" lastFinishedPulling="2025-09-29 12:52:49.753966555 +0000 UTC m=+756.645486161" observedRunningTime="2025-09-29 12:52:50.67120588 +0000 UTC m=+757.562725486" 
watchObservedRunningTime="2025-09-29 12:52:51.741953932 +0000 UTC m=+758.633473528" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.744181 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk"] Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.745093 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.747498 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-xgg4p" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.769027 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk"] Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.793386 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7"] Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.794283 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.797715 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.842082 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7"] Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.842742 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwc7q\" (UniqueName: \"kubernetes.io/projected/46ee623a-0fc0-4e35-a3de-96d6f2cbacb3-kube-api-access-lwc7q\") pod \"nmstate-metrics-58fcddf996-xh6gk\" (UID: \"46ee623a-0fc0-4e35-a3de-96d6f2cbacb3\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.842775 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/715ae13d-ad60-4871-a0f3-9f3575718223-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.842917 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx5cb\" (UniqueName: \"kubernetes.io/projected/715ae13d-ad60-4871-a0f3-9f3575718223-kube-api-access-nx5cb\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.854566 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-xq52r"] Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.855407 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944745 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx5cb\" (UniqueName: \"kubernetes.io/projected/715ae13d-ad60-4871-a0f3-9f3575718223-kube-api-access-nx5cb\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944795 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-nmstate-lock\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944815 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-dbus-socket\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944834 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-ovs-socket\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944850 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwc7q\" (UniqueName: \"kubernetes.io/projected/46ee623a-0fc0-4e35-a3de-96d6f2cbacb3-kube-api-access-lwc7q\") pod \"nmstate-metrics-58fcddf996-xh6gk\" (UID: \"46ee623a-0fc0-4e35-a3de-96d6f2cbacb3\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944873 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/715ae13d-ad60-4871-a0f3-9f3575718223-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.944895 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm2lk\" (UniqueName: \"kubernetes.io/projected/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-kube-api-access-mm2lk\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:51 crc kubenswrapper[4611]: E0929 12:52:51.945379 4611 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Sep 29 12:52:51 crc kubenswrapper[4611]: E0929 12:52:51.945424 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/715ae13d-ad60-4871-a0f3-9f3575718223-tls-key-pair podName:715ae13d-ad60-4871-a0f3-9f3575718223 nodeName:}" failed. No retries permitted until 2025-09-29 12:52:52.445408193 +0000 UTC m=+759.336927799 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/715ae13d-ad60-4871-a0f3-9f3575718223-tls-key-pair") pod "nmstate-webhook-6d689559c5-xgvs7" (UID: "715ae13d-ad60-4871-a0f3-9f3575718223") : secret "openshift-nmstate-webhook" not found Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.974744 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwc7q\" (UniqueName: \"kubernetes.io/projected/46ee623a-0fc0-4e35-a3de-96d6f2cbacb3-kube-api-access-lwc7q\") pod \"nmstate-metrics-58fcddf996-xh6gk\" (UID: \"46ee623a-0fc0-4e35-a3de-96d6f2cbacb3\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" Sep 29 12:52:51 crc kubenswrapper[4611]: I0929 12:52:51.978890 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx5cb\" (UniqueName: \"kubernetes.io/projected/715ae13d-ad60-4871-a0f3-9f3575718223-kube-api-access-nx5cb\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.011051 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj"] Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.011914 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.014331 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.016763 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.016987 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-tvhwj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.046250 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-nmstate-lock\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.046378 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-dbus-socket\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.046413 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-ovs-socket\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.046462 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm2lk\" (UniqueName: \"kubernetes.io/projected/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-kube-api-access-mm2lk\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc 
kubenswrapper[4611]: I0929 12:52:52.046338 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-nmstate-lock\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.047098 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-dbus-socket\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.047140 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-ovs-socket\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.048401 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj"] Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.061636 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.083880 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm2lk\" (UniqueName: \"kubernetes.io/projected/9eba9e14-2b7a-4874-a48b-26cbee1d9c6d-kube-api-access-mm2lk\") pod \"nmstate-handler-xq52r\" (UID: \"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d\") " pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.148653 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.148921 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs86s\" (UniqueName: \"kubernetes.io/projected/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-kube-api-access-zs86s\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.149037 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.183123 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:52 crc kubenswrapper[4611]: W0929 12:52:52.229742 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9eba9e14_2b7a_4874_a48b_26cbee1d9c6d.slice/crio-ba1d7ee68247b6f91c41c3537e48f9b3df3c03b4273dba257d775687ea0ab68e WatchSource:0}: Error finding container ba1d7ee68247b6f91c41c3537e48f9b3df3c03b4273dba257d775687ea0ab68e: Status 404 returned error can't find the container with id ba1d7ee68247b6f91c41c3537e48f9b3df3c03b4273dba257d775687ea0ab68e Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.238663 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5fd944d66d-n9c8s"] Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.239577 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.251379 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.251472 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.251549 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs86s\" (UniqueName: \"kubernetes.io/projected/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-kube-api-access-zs86s\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: E0929 12:52:52.251780 4611 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 29 12:52:52 crc kubenswrapper[4611]: E0929 12:52:52.251853 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-plugin-serving-cert podName:f71eaddf-7657-4ba9-a010-aaf7ef007f6e nodeName:}" failed. No retries permitted until 2025-09-29 12:52:52.75183484 +0000 UTC m=+759.643354436 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-n4kjj" (UID: "f71eaddf-7657-4ba9-a010-aaf7ef007f6e") : secret "plugin-serving-cert" not found Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.254850 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5fd944d66d-n9c8s"] Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.258576 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.300464 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs86s\" (UniqueName: \"kubernetes.io/projected/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-kube-api-access-zs86s\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.356961 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-trusted-ca-bundle\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.358123 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-serving-cert\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.358257 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-service-ca\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.358408 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-config\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.359910 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jx2l\" (UniqueName: \"kubernetes.io/projected/3f0a6bac-dc50-4498-b247-94b83cf8454d-kube-api-access-6jx2l\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.360061 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-oauth-config\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.360176 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-oauth-serving-cert\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461453 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-service-ca\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461529 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-config\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461548 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jx2l\" (UniqueName: \"kubernetes.io/projected/3f0a6bac-dc50-4498-b247-94b83cf8454d-kube-api-access-6jx2l\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461601 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-oauth-config\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461720 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-oauth-serving-cert\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461745 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/715ae13d-ad60-4871-a0f3-9f3575718223-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461769 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-trusted-ca-bundle\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.461802 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-serving-cert\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.465270 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-service-ca\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.465554 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-trusted-ca-bundle\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.467747 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-config\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.468236 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3f0a6bac-dc50-4498-b247-94b83cf8454d-oauth-serving-cert\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.469312 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/715ae13d-ad60-4871-a0f3-9f3575718223-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xgvs7\" (UID: \"715ae13d-ad60-4871-a0f3-9f3575718223\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.470923 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-serving-cert\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.472320 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3f0a6bac-dc50-4498-b247-94b83cf8454d-console-oauth-config\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.492838 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk"] Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.499775 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jx2l\" (UniqueName: \"kubernetes.io/projected/3f0a6bac-dc50-4498-b247-94b83cf8454d-kube-api-access-6jx2l\") pod \"console-5fd944d66d-n9c8s\" (UID: \"3f0a6bac-dc50-4498-b247-94b83cf8454d\") " pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc 
kubenswrapper[4611]: I0929 12:52:52.574927 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.640372 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" event={"ID":"46ee623a-0fc0-4e35-a3de-96d6f2cbacb3","Type":"ContainerStarted","Data":"39fc8a5822e03c04e2b02a24b0ca94a7ed9aed6b8ac3fefd2c1a2e46f1c1301d"} Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.641082 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xq52r" event={"ID":"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d","Type":"ContainerStarted","Data":"ba1d7ee68247b6f91c41c3537e48f9b3df3c03b4273dba257d775687ea0ab68e"} Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.717906 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.765385 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.771415 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f71eaddf-7657-4ba9-a010-aaf7ef007f6e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-n4kjj\" (UID: \"f71eaddf-7657-4ba9-a010-aaf7ef007f6e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.833468 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5fd944d66d-n9c8s"] Sep 29 12:52:52 crc kubenswrapper[4611]: I0929 12:52:52.929852 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.180402 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7"] Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.584256 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj"] Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.653807 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" event={"ID":"715ae13d-ad60-4871-a0f3-9f3575718223","Type":"ContainerStarted","Data":"89b0fc5e9138fedc97674db88da5e362610e5de0344f13539513964e6cc67eb6"} Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.655076 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5fd944d66d-n9c8s" event={"ID":"3f0a6bac-dc50-4498-b247-94b83cf8454d","Type":"ContainerStarted","Data":"7db843b5c37564f6bde603ab43d2a81e5f8be06b820f3f4ceb4c5abcc4a29387"} Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.655113 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5fd944d66d-n9c8s" event={"ID":"3f0a6bac-dc50-4498-b247-94b83cf8454d","Type":"ContainerStarted","Data":"c302bdac57737794b0f9dcc0aebf9d2931edcd37f37e9c068bc89f0c9c5645a7"} Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.657163 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" event={"ID":"f71eaddf-7657-4ba9-a010-aaf7ef007f6e","Type":"ContainerStarted","Data":"639ec2e163b5ba44b3338f91d5616a0baef4fdf88b2dbfd25ff9afe09d669f51"} Sep 29 12:52:53 crc kubenswrapper[4611]: I0929 12:52:53.677878 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5fd944d66d-n9c8s" podStartSLOduration=1.677854401 podStartE2EDuration="1.677854401s" podCreationTimestamp="2025-09-29 12:52:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:52:53.677152611 +0000 UTC m=+760.568672217" watchObservedRunningTime="2025-09-29 12:52:53.677854401 +0000 UTC m=+760.569374007" Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.688795 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" event={"ID":"46ee623a-0fc0-4e35-a3de-96d6f2cbacb3","Type":"ContainerStarted","Data":"fec64d34af5f5b68e20a2c5970626dca40ab3f11975b0b9bfc1d0618dea4c570"} Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.691594 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xq52r" event={"ID":"9eba9e14-2b7a-4874-a48b-26cbee1d9c6d","Type":"ContainerStarted","Data":"39088febfe9fe3d907f35114f207706c171e4ece8e595d6e0bb8fde95f8d80da"} Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.691794 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.693599 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" event={"ID":"715ae13d-ad60-4871-a0f3-9f3575718223","Type":"ContainerStarted","Data":"106fe0d0ab25565f0df7ad1494d39a2377f467aeda04d94e4f1b7d93743e5f84"} Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.693989 4611 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.695915 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" event={"ID":"f71eaddf-7657-4ba9-a010-aaf7ef007f6e","Type":"ContainerStarted","Data":"4e3b70065c41723de7fba34d5046fd74273259b07bd74fdf5feee598727ea9a2"} Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.711529 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-xq52r" podStartSLOduration=2.495999096 podStartE2EDuration="6.711514101s" podCreationTimestamp="2025-09-29 12:52:51 +0000 UTC" firstStartedPulling="2025-09-29 12:52:52.235020344 +0000 UTC m=+759.126539950" lastFinishedPulling="2025-09-29 12:52:56.450535349 +0000 UTC m=+763.342054955" observedRunningTime="2025-09-29 12:52:57.705836536 +0000 UTC m=+764.597356172" watchObservedRunningTime="2025-09-29 12:52:57.711514101 +0000 UTC m=+764.603033707" Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.726206 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-n4kjj" podStartSLOduration=3.871362612 podStartE2EDuration="6.726185895s" podCreationTimestamp="2025-09-29 12:52:51 +0000 UTC" firstStartedPulling="2025-09-29 12:52:53.595657695 +0000 UTC m=+760.487177301" lastFinishedPulling="2025-09-29 12:52:56.450480978 +0000 UTC m=+763.342000584" observedRunningTime="2025-09-29 12:52:57.724086834 +0000 UTC m=+764.615606440" watchObservedRunningTime="2025-09-29 12:52:57.726185895 +0000 UTC m=+764.617705501" Sep 29 12:52:57 crc kubenswrapper[4611]: I0929 12:52:57.792529 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" podStartSLOduration=3.540765496 podStartE2EDuration="6.792509263s" podCreationTimestamp="2025-09-29 12:52:51 +0000 UTC" firstStartedPulling="2025-09-29 12:52:53.197234768 +0000 UTC m=+760.088754374" lastFinishedPulling="2025-09-29 12:52:56.448978535 +0000 UTC m=+763.340498141" observedRunningTime="2025-09-29 12:52:57.788994791 +0000 UTC m=+764.680514407" watchObservedRunningTime="2025-09-29 12:52:57.792509263 +0000 UTC m=+764.684028889" Sep 29 12:52:59 crc kubenswrapper[4611]: I0929 12:52:59.708986 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" event={"ID":"46ee623a-0fc0-4e35-a3de-96d6f2cbacb3","Type":"ContainerStarted","Data":"c48b37e663c8144e2088c9f8bf78b4954c99110fc0ac9d73876714af4a887a71"} Sep 29 12:52:59 crc kubenswrapper[4611]: I0929 12:52:59.727859 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-xh6gk" podStartSLOduration=1.954099059 podStartE2EDuration="8.727841165s" podCreationTimestamp="2025-09-29 12:52:51 +0000 UTC" firstStartedPulling="2025-09-29 12:52:52.505480812 +0000 UTC m=+759.397000418" lastFinishedPulling="2025-09-29 12:52:59.279222918 +0000 UTC m=+766.170742524" observedRunningTime="2025-09-29 12:52:59.725066285 +0000 UTC m=+766.616585891" watchObservedRunningTime="2025-09-29 12:52:59.727841165 +0000 UTC m=+766.619360781" Sep 29 12:53:02 crc kubenswrapper[4611]: I0929 12:53:02.205513 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-xq52r" Sep 29 12:53:02 crc kubenswrapper[4611]: I0929 12:53:02.575440 4611 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:53:02 crc kubenswrapper[4611]: I0929 12:53:02.575518 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:53:02 crc kubenswrapper[4611]: I0929 12:53:02.579899 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:53:02 crc kubenswrapper[4611]: I0929 12:53:02.730370 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5fd944d66d-n9c8s" Sep 29 12:53:02 crc kubenswrapper[4611]: I0929 12:53:02.785245 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-c7v9z"] Sep 29 12:53:04 crc kubenswrapper[4611]: I0929 12:53:04.629063 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:53:04 crc kubenswrapper[4611]: I0929 12:53:04.629385 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:53:12 crc kubenswrapper[4611]: I0929 12:53:12.724355 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xgvs7" Sep 29 12:53:20 crc kubenswrapper[4611]: I0929 12:53:20.851754 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-skjsd"] Sep 29 12:53:20 crc kubenswrapper[4611]: I0929 12:53:20.856114 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:20 crc kubenswrapper[4611]: I0929 12:53:20.868710 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skjsd"] Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.001844 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-catalog-content\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.001908 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zcps\" (UniqueName: \"kubernetes.io/projected/b3207e4f-6c3d-47ec-ae85-98648b23a486-kube-api-access-4zcps\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.002039 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-utilities\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.102942 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-utilities\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.103018 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-catalog-content\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.103046 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zcps\" (UniqueName: \"kubernetes.io/projected/b3207e4f-6c3d-47ec-ae85-98648b23a486-kube-api-access-4zcps\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.103568 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-utilities\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.106181 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-catalog-content\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.138083 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4zcps\" (UniqueName: \"kubernetes.io/projected/b3207e4f-6c3d-47ec-ae85-98648b23a486-kube-api-access-4zcps\") pod \"community-operators-skjsd\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.189148 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.731147 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skjsd"] Sep 29 12:53:21 crc kubenswrapper[4611]: I0929 12:53:21.849817 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerStarted","Data":"274918806d54bb35afb130813e0ad7da1942a2585a0c4b88a27060fdbb60097e"} Sep 29 12:53:22 crc kubenswrapper[4611]: I0929 12:53:22.858116 4611 generic.go:334] "Generic (PLEG): container finished" podID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerID="368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699" exitCode=0 Sep 29 12:53:22 crc kubenswrapper[4611]: I0929 12:53:22.858185 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerDied","Data":"368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699"} Sep 29 12:53:23 crc kubenswrapper[4611]: I0929 12:53:23.871825 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerStarted","Data":"daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad"} Sep 29 12:53:24 crc kubenswrapper[4611]: I0929 12:53:24.878421 4611 generic.go:334] "Generic (PLEG): container finished" podID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerID="daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad" exitCode=0 Sep 29 12:53:24 crc kubenswrapper[4611]: I0929 12:53:24.878574 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerDied","Data":"daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad"} Sep 29 12:53:25 crc kubenswrapper[4611]: I0929 12:53:25.890085 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerStarted","Data":"6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3"} Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.650517 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-skjsd" podStartSLOduration=4.150447429 podStartE2EDuration="6.650500587s" podCreationTimestamp="2025-09-29 12:53:20 +0000 UTC" firstStartedPulling="2025-09-29 12:53:22.862790247 +0000 UTC m=+789.754309853" lastFinishedPulling="2025-09-29 12:53:25.362843405 +0000 UTC m=+792.254363011" observedRunningTime="2025-09-29 12:53:25.918309972 +0000 UTC m=+792.809829578" watchObservedRunningTime="2025-09-29 12:53:26.650500587 +0000 UTC m=+793.542020193" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.652291 4611 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7"] Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.653496 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.655161 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.667671 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7"] Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.783844 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.784143 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.784403 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwhk6\" (UniqueName: \"kubernetes.io/projected/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-kube-api-access-dwhk6\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.885714 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwhk6\" (UniqueName: \"kubernetes.io/projected/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-kube-api-access-dwhk6\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.885791 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.885863 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " 
pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.886329 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.886374 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.904030 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwhk6\" (UniqueName: \"kubernetes.io/projected/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-kube-api-access-dwhk6\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:26 crc kubenswrapper[4611]: I0929 12:53:26.970609 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:27 crc kubenswrapper[4611]: I0929 12:53:27.393915 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7"] Sep 29 12:53:27 crc kubenswrapper[4611]: I0929 12:53:27.830805 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-c7v9z" podUID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" containerName="console" containerID="cri-o://e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9" gracePeriod=15 Sep 29 12:53:27 crc kubenswrapper[4611]: I0929 12:53:27.901191 4611 generic.go:334] "Generic (PLEG): container finished" podID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerID="8e2386cd2c14734dfebe17060e8a4d1a53a6021acb5dac09112041e7bd7add90" exitCode=0 Sep 29 12:53:27 crc kubenswrapper[4611]: I0929 12:53:27.901229 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" event={"ID":"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6","Type":"ContainerDied","Data":"8e2386cd2c14734dfebe17060e8a4d1a53a6021acb5dac09112041e7bd7add90"} Sep 29 12:53:27 crc kubenswrapper[4611]: I0929 12:53:27.901256 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" event={"ID":"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6","Type":"ContainerStarted","Data":"f157079ab2aa6956986dc7fd55e74f2c4ad10912bfa1cad21300a520b61b1aa5"} Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.295382 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-c7v9z_c1afa714-1c99-43a8-ba3f-96af7f49abd5/console/0.log" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.295454 4611 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.403637 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-oauth-serving-cert\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404014 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-oauth-config\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404041 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-trusted-ca-bundle\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404063 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-serving-cert\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404138 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-config\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404207 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-service-ca\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404256 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcpgb\" (UniqueName: \"kubernetes.io/projected/c1afa714-1c99-43a8-ba3f-96af7f49abd5-kube-api-access-lcpgb\") pod \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\" (UID: \"c1afa714-1c99-43a8-ba3f-96af7f49abd5\") " Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404712 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404758 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-config" (OuterVolumeSpecName: "console-config") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.404967 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.405195 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-service-ca" (OuterVolumeSpecName: "service-ca") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.405446 4611 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.405476 4611 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.405488 4611 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.405496 4611 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1afa714-1c99-43a8-ba3f-96af7f49abd5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.409400 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.409743 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1afa714-1c99-43a8-ba3f-96af7f49abd5-kube-api-access-lcpgb" (OuterVolumeSpecName: "kube-api-access-lcpgb") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "kube-api-access-lcpgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.409799 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "c1afa714-1c99-43a8-ba3f-96af7f49abd5" (UID: "c1afa714-1c99-43a8-ba3f-96af7f49abd5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.506281 4611 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.506332 4611 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1afa714-1c99-43a8-ba3f-96af7f49abd5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.506344 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcpgb\" (UniqueName: \"kubernetes.io/projected/c1afa714-1c99-43a8-ba3f-96af7f49abd5-kube-api-access-lcpgb\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.911493 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-c7v9z_c1afa714-1c99-43a8-ba3f-96af7f49abd5/console/0.log" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.911536 4611 generic.go:334] "Generic (PLEG): container finished" podID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" containerID="e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9" exitCode=2 Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.911563 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-c7v9z" event={"ID":"c1afa714-1c99-43a8-ba3f-96af7f49abd5","Type":"ContainerDied","Data":"e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9"} Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.911588 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-c7v9z" event={"ID":"c1afa714-1c99-43a8-ba3f-96af7f49abd5","Type":"ContainerDied","Data":"bad3e569cd6cccfcd9818e3b6662db0cd1ee39062a6fa4d0b4629a8913f2a76a"} Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.911603 4611 scope.go:117] "RemoveContainer" containerID="e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.911729 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-c7v9z" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.931097 4611 scope.go:117] "RemoveContainer" containerID="e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9" Sep 29 12:53:28 crc kubenswrapper[4611]: E0929 12:53:28.931637 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9\": container with ID starting with e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9 not found: ID does not exist" containerID="e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.931691 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9"} err="failed to get container status \"e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9\": rpc error: code = NotFound desc = could not find container \"e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9\": container with ID starting with e1a957a17382edafb57adaf33eac31c63d58c13901f6ce0c3006813858a051d9 not found: ID does not exist" Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.949726 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-c7v9z"] Sep 29 12:53:28 crc kubenswrapper[4611]: I0929 12:53:28.954368 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-c7v9z"] Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.743710 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" path="/var/lib/kubelet/pods/c1afa714-1c99-43a8-ba3f-96af7f49abd5/volumes" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.819231 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mdmpb"] Sep 29 12:53:29 crc kubenswrapper[4611]: E0929 12:53:29.819707 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" containerName="console" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.819794 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" containerName="console" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.820002 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1afa714-1c99-43a8-ba3f-96af7f49abd5" containerName="console" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.820975 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.842460 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mdmpb"] Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.918669 4611 generic.go:334] "Generic (PLEG): container finished" podID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerID="9aba34707e529c06330397c284d3c8845fe977171244e460186c978752c875b2" exitCode=0 Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.918737 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" event={"ID":"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6","Type":"ContainerDied","Data":"9aba34707e529c06330397c284d3c8845fe977171244e460186c978752c875b2"} Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.923810 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-catalog-content\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.923845 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cwx7\" (UniqueName: \"kubernetes.io/projected/bd28b8c3-c43b-44f7-9091-ac37615c47f1-kube-api-access-9cwx7\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:29 crc kubenswrapper[4611]: I0929 12:53:29.923893 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-utilities\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.024616 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-catalog-content\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.024688 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cwx7\" (UniqueName: \"kubernetes.io/projected/bd28b8c3-c43b-44f7-9091-ac37615c47f1-kube-api-access-9cwx7\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.024758 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-utilities\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.025404 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-catalog-content\") pod \"redhat-operators-mdmpb\" (UID: 
\"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.025711 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-utilities\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.045744 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cwx7\" (UniqueName: \"kubernetes.io/projected/bd28b8c3-c43b-44f7-9091-ac37615c47f1-kube-api-access-9cwx7\") pod \"redhat-operators-mdmpb\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") " pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.136779 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.514144 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mdmpb"] Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.928213 4611 generic.go:334] "Generic (PLEG): container finished" podID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerID="04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743" exitCode=0 Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.928277 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerDied","Data":"04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743"} Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.928693 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerStarted","Data":"5d54f1b6c0fbf924abb5effb6d9b2b30f9328ea035fef016b6d2a613e7821b24"} Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.931614 4611 generic.go:334] "Generic (PLEG): container finished" podID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerID="9912c27454569abb6bdcbe0bafe99efd85956f39beff9a5cf07a9b6b23038558" exitCode=0 Sep 29 12:53:30 crc kubenswrapper[4611]: I0929 12:53:30.931765 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" event={"ID":"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6","Type":"ContainerDied","Data":"9912c27454569abb6bdcbe0bafe99efd85956f39beff9a5cf07a9b6b23038558"} Sep 29 12:53:31 crc kubenswrapper[4611]: I0929 12:53:31.190172 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:31 crc kubenswrapper[4611]: I0929 12:53:31.190772 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:31 crc kubenswrapper[4611]: I0929 12:53:31.233054 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:31 crc kubenswrapper[4611]: I0929 12:53:31.938608 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" 
event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerStarted","Data":"8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48"} Sep 29 12:53:31 crc kubenswrapper[4611]: I0929 12:53:31.978396 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.237806 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.363138 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwhk6\" (UniqueName: \"kubernetes.io/projected/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-kube-api-access-dwhk6\") pod \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.363211 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-bundle\") pod \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.363264 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-util\") pod \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\" (UID: \"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6\") " Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.364099 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-bundle" (OuterVolumeSpecName: "bundle") pod "d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" (UID: "d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.368225 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-kube-api-access-dwhk6" (OuterVolumeSpecName: "kube-api-access-dwhk6") pod "d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" (UID: "d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6"). InnerVolumeSpecName "kube-api-access-dwhk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.377563 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-util" (OuterVolumeSpecName: "util") pod "d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" (UID: "d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.464345 4611 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-util\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.464370 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwhk6\" (UniqueName: \"kubernetes.io/projected/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-kube-api-access-dwhk6\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.464381 4611 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.945485 4611 generic.go:334] "Generic (PLEG): container finished" podID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerID="8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48" exitCode=0 Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.946735 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerDied","Data":"8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48"} Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.951940 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.951985 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7" event={"ID":"d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6","Type":"ContainerDied","Data":"f157079ab2aa6956986dc7fd55e74f2c4ad10912bfa1cad21300a520b61b1aa5"} Sep 29 12:53:32 crc kubenswrapper[4611]: I0929 12:53:32.952007 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f157079ab2aa6956986dc7fd55e74f2c4ad10912bfa1cad21300a520b61b1aa5" Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.628848 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.629189 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.629253 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.630114 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a017a7df5b38d210396454e3d5320e7848368cef39cc3b4e52346ca0e1c69ea7"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.631364 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://a017a7df5b38d210396454e3d5320e7848368cef39cc3b4e52346ca0e1c69ea7" gracePeriod=600 Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.811433 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skjsd"] Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.811717 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-skjsd" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="registry-server" containerID="cri-o://6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3" gracePeriod=2 Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.964038 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerStarted","Data":"d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb"} Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.966666 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="a017a7df5b38d210396454e3d5320e7848368cef39cc3b4e52346ca0e1c69ea7" exitCode=0 Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.966709 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"a017a7df5b38d210396454e3d5320e7848368cef39cc3b4e52346ca0e1c69ea7"} Sep 29 12:53:34 crc kubenswrapper[4611]: I0929 12:53:34.966738 4611 scope.go:117] "RemoveContainer" containerID="49931f0be5e603c2daaa8ecf2f4e39aef9cf15176e498454b5a286b9aedf81bb" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.778608 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.829307 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-utilities\") pod \"b3207e4f-6c3d-47ec-ae85-98648b23a486\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.829361 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-catalog-content\") pod \"b3207e4f-6c3d-47ec-ae85-98648b23a486\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.829488 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zcps\" (UniqueName: \"kubernetes.io/projected/b3207e4f-6c3d-47ec-ae85-98648b23a486-kube-api-access-4zcps\") pod \"b3207e4f-6c3d-47ec-ae85-98648b23a486\" (UID: \"b3207e4f-6c3d-47ec-ae85-98648b23a486\") " Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.830333 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-utilities" (OuterVolumeSpecName: "utilities") pod "b3207e4f-6c3d-47ec-ae85-98648b23a486" (UID: "b3207e4f-6c3d-47ec-ae85-98648b23a486"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.858004 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3207e4f-6c3d-47ec-ae85-98648b23a486-kube-api-access-4zcps" (OuterVolumeSpecName: "kube-api-access-4zcps") pod "b3207e4f-6c3d-47ec-ae85-98648b23a486" (UID: "b3207e4f-6c3d-47ec-ae85-98648b23a486"). InnerVolumeSpecName "kube-api-access-4zcps". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.884856 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3207e4f-6c3d-47ec-ae85-98648b23a486" (UID: "b3207e4f-6c3d-47ec-ae85-98648b23a486"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.931340 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zcps\" (UniqueName: \"kubernetes.io/projected/b3207e4f-6c3d-47ec-ae85-98648b23a486-kube-api-access-4zcps\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.931376 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.931384 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3207e4f-6c3d-47ec-ae85-98648b23a486-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.974110 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"5dabec6d4a1f56079556aa2416ffa504eeb7f0aa06b802b890dac62cf28cc40d"} Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.977005 4611 generic.go:334] "Generic (PLEG): container finished" podID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerID="6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3" exitCode=0 Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.977561 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skjsd" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.978786 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerDied","Data":"6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3"} Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.978821 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skjsd" event={"ID":"b3207e4f-6c3d-47ec-ae85-98648b23a486","Type":"ContainerDied","Data":"274918806d54bb35afb130813e0ad7da1942a2585a0c4b88a27060fdbb60097e"} Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.978840 4611 scope.go:117] "RemoveContainer" containerID="6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3" Sep 29 12:53:35 crc kubenswrapper[4611]: I0929 12:53:35.994917 4611 scope.go:117] "RemoveContainer" containerID="daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.017885 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skjsd"] Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.019890 4611 scope.go:117] "RemoveContainer" containerID="368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.031774 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-skjsd"] Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.046902 4611 scope.go:117] "RemoveContainer" containerID="6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3" Sep 29 12:53:36 crc kubenswrapper[4611]: E0929 12:53:36.047649 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3\": container with ID starting with 6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3 not found: ID does not exist" containerID="6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.047687 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3"} err="failed to get container status \"6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3\": rpc error: code = NotFound desc = could not find container \"6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3\": container with ID starting with 6ff8e89157015a1fd4f579b19b3c6935e0f346013965c598c2017fbf71f1c3f3 not found: ID does not exist" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.047712 4611 scope.go:117] "RemoveContainer" containerID="daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad" Sep 29 12:53:36 crc kubenswrapper[4611]: E0929 12:53:36.049223 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad\": container with ID starting with daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad not found: ID does not exist" containerID="daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.049267 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad"} err="failed to get container status \"daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad\": rpc error: code = NotFound desc = could not find container \"daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad\": container with ID starting with daa754354fd31297e593be3b21b394d6635f064cfde4878e9944625f8fd4d1ad not found: ID does not exist" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.049293 4611 scope.go:117] "RemoveContainer" containerID="368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699" Sep 29 12:53:36 crc kubenswrapper[4611]: E0929 12:53:36.049769 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699\": container with ID starting with 368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699 not found: ID does not exist" containerID="368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.049792 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699"} err="failed to get container status \"368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699\": rpc error: code = NotFound desc = could not find container \"368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699\": container with ID starting with 368fa37b54081505b0f59258f5fe7f567bbc4c98d4a1702fc99fbb07a90f8699 not found: ID does not exist" Sep 29 12:53:36 crc kubenswrapper[4611]: I0929 12:53:36.071400 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mdmpb" 
podStartSLOduration=3.493155587 podStartE2EDuration="7.071383803s" podCreationTimestamp="2025-09-29 12:53:29 +0000 UTC" firstStartedPulling="2025-09-29 12:53:30.930508107 +0000 UTC m=+797.822027713" lastFinishedPulling="2025-09-29 12:53:34.508736323 +0000 UTC m=+801.400255929" observedRunningTime="2025-09-29 12:53:36.068421927 +0000 UTC m=+802.959941563" watchObservedRunningTime="2025-09-29 12:53:36.071383803 +0000 UTC m=+802.962903409" Sep 29 12:53:37 crc kubenswrapper[4611]: I0929 12:53:37.743022 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" path="/var/lib/kubelet/pods/b3207e4f-6c3d-47ec-ae85-98648b23a486/volumes" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.137365 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.137486 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.209032 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.379956 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh"] Sep 29 12:53:40 crc kubenswrapper[4611]: E0929 12:53:40.380168 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="registry-server" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380182 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="registry-server" Sep 29 12:53:40 crc kubenswrapper[4611]: E0929 12:53:40.380203 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="extract-utilities" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380210 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="extract-utilities" Sep 29 12:53:40 crc kubenswrapper[4611]: E0929 12:53:40.380218 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="util" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380225 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="util" Sep 29 12:53:40 crc kubenswrapper[4611]: E0929 12:53:40.380235 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="extract-content" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380242 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="extract-content" Sep 29 12:53:40 crc kubenswrapper[4611]: E0929 12:53:40.380252 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="pull" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380258 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="pull" Sep 29 12:53:40 crc kubenswrapper[4611]: E0929 12:53:40.380266 4611 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="extract" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380273 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="extract" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380375 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6" containerName="extract" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380389 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3207e4f-6c3d-47ec-ae85-98648b23a486" containerName="registry-server" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.380829 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.387170 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.387204 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.387267 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.388935 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.392756 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-h9769" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.422645 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh"] Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.486732 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwdql\" (UniqueName: \"kubernetes.io/projected/0a1613d5-6707-489a-b5a8-f8aa95ebc744-kube-api-access-fwdql\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.486797 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0a1613d5-6707-489a-b5a8-f8aa95ebc744-apiservice-cert\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.486823 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0a1613d5-6707-489a-b5a8-f8aa95ebc744-webhook-cert\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.587430 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/0a1613d5-6707-489a-b5a8-f8aa95ebc744-webhook-cert\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.587808 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwdql\" (UniqueName: \"kubernetes.io/projected/0a1613d5-6707-489a-b5a8-f8aa95ebc744-kube-api-access-fwdql\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.587876 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0a1613d5-6707-489a-b5a8-f8aa95ebc744-apiservice-cert\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.597195 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0a1613d5-6707-489a-b5a8-f8aa95ebc744-webhook-cert\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.601850 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0a1613d5-6707-489a-b5a8-f8aa95ebc744-apiservice-cert\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.609419 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwdql\" (UniqueName: \"kubernetes.io/projected/0a1613d5-6707-489a-b5a8-f8aa95ebc744-kube-api-access-fwdql\") pod \"metallb-operator-controller-manager-857688c5cb-tvmnh\" (UID: \"0a1613d5-6707-489a-b5a8-f8aa95ebc744\") " pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.698009 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.907937 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf"] Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.909190 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.913600 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.913684 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.913707 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-dz77z" Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.939397 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf"] Sep 29 12:53:40 crc kubenswrapper[4611]: I0929 12:53:40.991478 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh"] Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.005815 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c9329be6-e276-49b8-b4ff-89b18b9c350b-webhook-cert\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.005872 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxll2\" (UniqueName: \"kubernetes.io/projected/c9329be6-e276-49b8-b4ff-89b18b9c350b-kube-api-access-jxll2\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.005958 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c9329be6-e276-49b8-b4ff-89b18b9c350b-apiservice-cert\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.028713 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" event={"ID":"0a1613d5-6707-489a-b5a8-f8aa95ebc744","Type":"ContainerStarted","Data":"afd6f503d9dd48a5febb1ab0bc968809d847aa250509ab6e6add8d4fb7d852d1"} Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.086946 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.107098 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c9329be6-e276-49b8-b4ff-89b18b9c350b-apiservice-cert\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.107149 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/c9329be6-e276-49b8-b4ff-89b18b9c350b-webhook-cert\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.107181 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxll2\" (UniqueName: \"kubernetes.io/projected/c9329be6-e276-49b8-b4ff-89b18b9c350b-kube-api-access-jxll2\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.114767 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c9329be6-e276-49b8-b4ff-89b18b9c350b-webhook-cert\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.118167 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c9329be6-e276-49b8-b4ff-89b18b9c350b-apiservice-cert\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.143207 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxll2\" (UniqueName: \"kubernetes.io/projected/c9329be6-e276-49b8-b4ff-89b18b9c350b-kube-api-access-jxll2\") pod \"metallb-operator-webhook-server-5b6b57cd56-hx5bf\" (UID: \"c9329be6-e276-49b8-b4ff-89b18b9c350b\") " pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.240951 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" Sep 29 12:53:41 crc kubenswrapper[4611]: I0929 12:53:41.506153 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf"] Sep 29 12:53:41 crc kubenswrapper[4611]: W0929 12:53:41.510133 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9329be6_e276_49b8_b4ff_89b18b9c350b.slice/crio-41bdc05c9dc8906f7b013ca448031c4fda45dbb9da21f036c79873fc875ad61c WatchSource:0}: Error finding container 41bdc05c9dc8906f7b013ca448031c4fda45dbb9da21f036c79873fc875ad61c: Status 404 returned error can't find the container with id 41bdc05c9dc8906f7b013ca448031c4fda45dbb9da21f036c79873fc875ad61c Sep 29 12:53:42 crc kubenswrapper[4611]: I0929 12:53:42.034313 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" event={"ID":"c9329be6-e276-49b8-b4ff-89b18b9c350b","Type":"ContainerStarted","Data":"41bdc05c9dc8906f7b013ca448031c4fda45dbb9da21f036c79873fc875ad61c"} Sep 29 12:53:43 crc kubenswrapper[4611]: I0929 12:53:43.408881 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mdmpb"] Sep 29 12:53:44 crc kubenswrapper[4611]: I0929 12:53:44.052860 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mdmpb" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="registry-server" containerID="cri-o://d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb" gracePeriod=2 Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.050897 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mdmpb" Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.061409 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" event={"ID":"0a1613d5-6707-489a-b5a8-f8aa95ebc744","Type":"ContainerStarted","Data":"27b830846f9b74eea7056e7ecdb7a99b270ea0a2b29a79c4406e62d7c8627d8a"} Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.062011 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.064378 4611 generic.go:334] "Generic (PLEG): container finished" podID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerID="d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb" exitCode=0 Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.064420 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerDied","Data":"d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb"} Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.064445 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdmpb" event={"ID":"bd28b8c3-c43b-44f7-9091-ac37615c47f1","Type":"ContainerDied","Data":"5d54f1b6c0fbf924abb5effb6d9b2b30f9328ea035fef016b6d2a613e7821b24"} Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.064462 4611 scope.go:117] "RemoveContainer" containerID="d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb" Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.064587 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mdmpb"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.086855 4611 scope.go:117] "RemoveContainer" containerID="8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.111270 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh" podStartSLOduration=1.4094538779999999 podStartE2EDuration="5.111234994s" podCreationTimestamp="2025-09-29 12:53:40 +0000 UTC" firstStartedPulling="2025-09-29 12:53:41.020577747 +0000 UTC m=+807.912097353" lastFinishedPulling="2025-09-29 12:53:44.722358863 +0000 UTC m=+811.613878469" observedRunningTime="2025-09-29 12:53:45.111011728 +0000 UTC m=+812.002531344" watchObservedRunningTime="2025-09-29 12:53:45.111234994 +0000 UTC m=+812.002754610"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.115448 4611 scope.go:117] "RemoveContainer" containerID="04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.144678 4611 scope.go:117] "RemoveContainer" containerID="d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb"
Sep 29 12:53:45 crc kubenswrapper[4611]: E0929 12:53:45.145154 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb\": container with ID starting with d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb not found: ID does not exist" containerID="d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.145180 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb"} err="failed to get container status \"d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb\": rpc error: code = NotFound desc = could not find container \"d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb\": container with ID starting with d62d9dd816f21e3a750afad8e234ea7815caabb4ec0e45e169e9777553822bdb not found: ID does not exist"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.145204 4611 scope.go:117] "RemoveContainer" containerID="8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48"
Sep 29 12:53:45 crc kubenswrapper[4611]: E0929 12:53:45.148033 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48\": container with ID starting with 8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48 not found: ID does not exist" containerID="8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.148073 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48"} err="failed to get container status \"8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48\": rpc error: code = NotFound desc = could not find container \"8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48\": container with ID starting with 8233ad4803d40bc4a5b46bdd4309b5ffe6dcf479eaa6ec7d35fc3519f458cf48 not found: ID does not exist"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.148096 4611 scope.go:117] "RemoveContainer" containerID="04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743"
Sep 29 12:53:45 crc kubenswrapper[4611]: E0929 12:53:45.148388 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743\": container with ID starting with 04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743 not found: ID does not exist" containerID="04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.148419 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743"} err="failed to get container status \"04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743\": rpc error: code = NotFound desc = could not find container \"04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743\": container with ID starting with 04f39657b44c06d70c81233c035e357a79c1394767a89d3f5bf67e34fb4a7743 not found: ID does not exist"
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.181045 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cwx7\" (UniqueName: \"kubernetes.io/projected/bd28b8c3-c43b-44f7-9091-ac37615c47f1-kube-api-access-9cwx7\") pod \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") "
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.181149 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-utilities\") pod \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") "
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.181200 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-catalog-content\") pod \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\" (UID: \"bd28b8c3-c43b-44f7-9091-ac37615c47f1\") "
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.188966 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd28b8c3-c43b-44f7-9091-ac37615c47f1-kube-api-access-9cwx7" (OuterVolumeSpecName: "kube-api-access-9cwx7") pod "bd28b8c3-c43b-44f7-9091-ac37615c47f1" (UID: "bd28b8c3-c43b-44f7-9091-ac37615c47f1"). InnerVolumeSpecName "kube-api-access-9cwx7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.190160 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-utilities" (OuterVolumeSpecName: "utilities") pod "bd28b8c3-c43b-44f7-9091-ac37615c47f1" (UID: "bd28b8c3-c43b-44f7-9091-ac37615c47f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.284028 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.284294 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cwx7\" (UniqueName: \"kubernetes.io/projected/bd28b8c3-c43b-44f7-9091-ac37615c47f1-kube-api-access-9cwx7\") on node \"crc\" DevicePath \"\""
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.290233 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd28b8c3-c43b-44f7-9091-ac37615c47f1" (UID: "bd28b8c3-c43b-44f7-9091-ac37615c47f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.386160 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd28b8c3-c43b-44f7-9091-ac37615c47f1-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.396909 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mdmpb"]
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.402736 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mdmpb"]
Sep 29 12:53:45 crc kubenswrapper[4611]: I0929 12:53:45.752012 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" path="/var/lib/kubelet/pods/bd28b8c3-c43b-44f7-9091-ac37615c47f1/volumes"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.051874 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rbwfq"]
Sep 29 12:53:48 crc kubenswrapper[4611]: E0929 12:53:48.053700 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="extract-content"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.053791 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="extract-content"
Sep 29 12:53:48 crc kubenswrapper[4611]: E0929 12:53:48.053867 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="extract-utilities"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.053928 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="extract-utilities"
Sep 29 12:53:48 crc kubenswrapper[4611]: E0929 12:53:48.053992 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="registry-server"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.054073 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="registry-server"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.054273 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd28b8c3-c43b-44f7-9091-ac37615c47f1" containerName="registry-server"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.055331 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.065534 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rbwfq"]
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.088232 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" event={"ID":"c9329be6-e276-49b8-b4ff-89b18b9c350b","Type":"ContainerStarted","Data":"a734d5944e569344281acfb37e71ef8e470a5629e95f3971ae1b69dbbe306cda"}
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.088567 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.111016 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf" podStartSLOduration=2.046085621 podStartE2EDuration="8.110998937s" podCreationTimestamp="2025-09-29 12:53:40 +0000 UTC" firstStartedPulling="2025-09-29 12:53:41.51397874 +0000 UTC m=+808.405498346" lastFinishedPulling="2025-09-29 12:53:47.578892056 +0000 UTC m=+814.470411662" observedRunningTime="2025-09-29 12:53:48.107881657 +0000 UTC m=+814.999401263" watchObservedRunningTime="2025-09-29 12:53:48.110998937 +0000 UTC m=+815.002518543"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.229859 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rldkh\" (UniqueName: \"kubernetes.io/projected/a695578e-2a34-4ae7-9619-386a2dc5296b-kube-api-access-rldkh\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.230139 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-utilities\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.230193 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-catalog-content\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.331282 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-utilities\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.331373 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-catalog-content\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.331434 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rldkh\" (UniqueName: \"kubernetes.io/projected/a695578e-2a34-4ae7-9619-386a2dc5296b-kube-api-access-rldkh\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.332183 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-catalog-content\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.332308 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-utilities\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.354129 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rldkh\" (UniqueName: \"kubernetes.io/projected/a695578e-2a34-4ae7-9619-386a2dc5296b-kube-api-access-rldkh\") pod \"redhat-marketplace-rbwfq\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") " pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.375431 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:48 crc kubenswrapper[4611]: I0929 12:53:48.808075 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rbwfq"]
Sep 29 12:53:49 crc kubenswrapper[4611]: I0929 12:53:49.094369 4611 generic.go:334] "Generic (PLEG): container finished" podID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerID="9d5e0516a1452453b95bd1758af1b4ce7c0c247da84ba9cdf6edb64cffc36929" exitCode=0
Sep 29 12:53:49 crc kubenswrapper[4611]: I0929 12:53:49.095614 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rbwfq" event={"ID":"a695578e-2a34-4ae7-9619-386a2dc5296b","Type":"ContainerDied","Data":"9d5e0516a1452453b95bd1758af1b4ce7c0c247da84ba9cdf6edb64cffc36929"}
Sep 29 12:53:49 crc kubenswrapper[4611]: I0929 12:53:49.095652 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rbwfq" event={"ID":"a695578e-2a34-4ae7-9619-386a2dc5296b","Type":"ContainerStarted","Data":"39ba312c346f268b353f1675560fc5cf5d4a1ec209be08e11ef7225a869bdb3c"}
Sep 29 12:53:51 crc kubenswrapper[4611]: I0929 12:53:51.109614 4611 generic.go:334] "Generic (PLEG): container finished" podID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerID="8b0ea8e8543b2bae47739aa1d5f3baf15e186a93df196e1d42a17057a37b6a38" exitCode=0
Sep 29 12:53:51 crc kubenswrapper[4611]: I0929 12:53:51.109658 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rbwfq" event={"ID":"a695578e-2a34-4ae7-9619-386a2dc5296b","Type":"ContainerDied","Data":"8b0ea8e8543b2bae47739aa1d5f3baf15e186a93df196e1d42a17057a37b6a38"}
Sep 29 12:53:52 crc kubenswrapper[4611]: I0929 12:53:52.117706 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rbwfq" event={"ID":"a695578e-2a34-4ae7-9619-386a2dc5296b","Type":"ContainerStarted","Data":"9fdc16dd8aad3455faafeb217ec337cfcd4863ca6dc417174f595961c498080e"}
Sep 29 12:53:52 crc kubenswrapper[4611]: I0929 12:53:52.150508 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rbwfq" podStartSLOduration=1.735790335 podStartE2EDuration="4.150490655s" podCreationTimestamp="2025-09-29 12:53:48 +0000 UTC" firstStartedPulling="2025-09-29 12:53:49.096152895 +0000 UTC m=+815.987672501" lastFinishedPulling="2025-09-29 12:53:51.510853215 +0000 UTC m=+818.402372821" observedRunningTime="2025-09-29 12:53:52.145346476 +0000 UTC m=+819.036866082" watchObservedRunningTime="2025-09-29 12:53:52.150490655 +0000 UTC m=+819.042010271"
Sep 29 12:53:58 crc kubenswrapper[4611]: I0929 12:53:58.376056 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:58 crc kubenswrapper[4611]: I0929 12:53:58.376494 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:58 crc kubenswrapper[4611]: I0929 12:53:58.417883 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:53:59 crc kubenswrapper[4611]: I0929 12:53:59.191447 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.025214 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mt5g9"]
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.026895 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.051959 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mt5g9"]
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.181482 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-catalog-content\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.181618 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xctmm\" (UniqueName: \"kubernetes.io/projected/43345e36-3eeb-4c14-813f-9f9372f43677-kube-api-access-xctmm\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.181697 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-utilities\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.282929 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xctmm\" (UniqueName: \"kubernetes.io/projected/43345e36-3eeb-4c14-813f-9f9372f43677-kube-api-access-xctmm\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.282992 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-utilities\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.283046 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-catalog-content\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.283467 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-catalog-content\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.283890 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-utilities\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.305383 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xctmm\" (UniqueName: \"kubernetes.io/projected/43345e36-3eeb-4c14-813f-9f9372f43677-kube-api-access-xctmm\") pod \"certified-operators-mt5g9\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") " pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.346404 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:00 crc kubenswrapper[4611]: I0929 12:54:00.886545 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mt5g9"]
Sep 29 12:54:01 crc kubenswrapper[4611]: I0929 12:54:01.166712 4611 generic.go:334] "Generic (PLEG): container finished" podID="43345e36-3eeb-4c14-813f-9f9372f43677" containerID="9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672" exitCode=0
Sep 29 12:54:01 crc kubenswrapper[4611]: I0929 12:54:01.166999 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerDied","Data":"9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672"}
Sep 29 12:54:01 crc kubenswrapper[4611]: I0929 12:54:01.167027 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerStarted","Data":"c49759dc1fa8783bdea73388c354d7712c6c1da4155db7ec63a5d3a4d49f3ed7"}
Sep 29 12:54:01 crc kubenswrapper[4611]: I0929 12:54:01.250195 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5b6b57cd56-hx5bf"
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.013082 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rbwfq"]
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.013319 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rbwfq" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="registry-server" containerID="cri-o://9fdc16dd8aad3455faafeb217ec337cfcd4863ca6dc417174f595961c498080e" gracePeriod=2
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.175964 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerStarted","Data":"f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd"}
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.180595 4611 generic.go:334] "Generic (PLEG): container finished" podID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerID="9fdc16dd8aad3455faafeb217ec337cfcd4863ca6dc417174f595961c498080e" exitCode=0
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.180676 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rbwfq" event={"ID":"a695578e-2a34-4ae7-9619-386a2dc5296b","Type":"ContainerDied","Data":"9fdc16dd8aad3455faafeb217ec337cfcd4863ca6dc417174f595961c498080e"}
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.461453 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.616284 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-utilities\") pod \"a695578e-2a34-4ae7-9619-386a2dc5296b\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") "
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.616386 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-catalog-content\") pod \"a695578e-2a34-4ae7-9619-386a2dc5296b\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") "
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.616449 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rldkh\" (UniqueName: \"kubernetes.io/projected/a695578e-2a34-4ae7-9619-386a2dc5296b-kube-api-access-rldkh\") pod \"a695578e-2a34-4ae7-9619-386a2dc5296b\" (UID: \"a695578e-2a34-4ae7-9619-386a2dc5296b\") "
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.617154 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-utilities" (OuterVolumeSpecName: "utilities") pod "a695578e-2a34-4ae7-9619-386a2dc5296b" (UID: "a695578e-2a34-4ae7-9619-386a2dc5296b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.626985 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a695578e-2a34-4ae7-9619-386a2dc5296b-kube-api-access-rldkh" (OuterVolumeSpecName: "kube-api-access-rldkh") pod "a695578e-2a34-4ae7-9619-386a2dc5296b" (UID: "a695578e-2a34-4ae7-9619-386a2dc5296b"). InnerVolumeSpecName "kube-api-access-rldkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.630507 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a695578e-2a34-4ae7-9619-386a2dc5296b" (UID: "a695578e-2a34-4ae7-9619-386a2dc5296b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.718015 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.718054 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a695578e-2a34-4ae7-9619-386a2dc5296b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:54:02 crc kubenswrapper[4611]: I0929 12:54:02.718068 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rldkh\" (UniqueName: \"kubernetes.io/projected/a695578e-2a34-4ae7-9619-386a2dc5296b-kube-api-access-rldkh\") on node \"crc\" DevicePath \"\""
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.187855 4611 generic.go:334] "Generic (PLEG): container finished" podID="43345e36-3eeb-4c14-813f-9f9372f43677" containerID="f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd" exitCode=0
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.187917 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerDied","Data":"f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd"}
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.190483 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rbwfq" event={"ID":"a695578e-2a34-4ae7-9619-386a2dc5296b","Type":"ContainerDied","Data":"39ba312c346f268b353f1675560fc5cf5d4a1ec209be08e11ef7225a869bdb3c"}
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.190582 4611 scope.go:117] "RemoveContainer" containerID="9fdc16dd8aad3455faafeb217ec337cfcd4863ca6dc417174f595961c498080e"
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.190613 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rbwfq"
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.215209 4611 scope.go:117] "RemoveContainer" containerID="8b0ea8e8543b2bae47739aa1d5f3baf15e186a93df196e1d42a17057a37b6a38"
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.236528 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rbwfq"]
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.236613 4611 scope.go:117] "RemoveContainer" containerID="9d5e0516a1452453b95bd1758af1b4ce7c0c247da84ba9cdf6edb64cffc36929"
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.239358 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rbwfq"]
Sep 29 12:54:03 crc kubenswrapper[4611]: I0929 12:54:03.743058 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" path="/var/lib/kubelet/pods/a695578e-2a34-4ae7-9619-386a2dc5296b/volumes"
Sep 29 12:54:04 crc kubenswrapper[4611]: I0929 12:54:04.200129 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerStarted","Data":"7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23"}
Sep 29 12:54:10 crc kubenswrapper[4611]: I0929 12:54:10.347262 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:10 crc kubenswrapper[4611]: I0929 12:54:10.347889 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:10 crc kubenswrapper[4611]: I0929 12:54:10.399274 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:10 crc kubenswrapper[4611]: I0929 12:54:10.417250 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mt5g9" podStartSLOduration=7.854218067 podStartE2EDuration="10.417235716s" podCreationTimestamp="2025-09-29 12:54:00 +0000 UTC" firstStartedPulling="2025-09-29 12:54:01.168516775 +0000 UTC m=+828.060036381" lastFinishedPulling="2025-09-29 12:54:03.731534424 +0000 UTC m=+830.623054030" observedRunningTime="2025-09-29 12:54:04.22094566 +0000 UTC m=+831.112465276" watchObservedRunningTime="2025-09-29 12:54:10.417235716 +0000 UTC m=+837.308755312"
Sep 29 12:54:11 crc kubenswrapper[4611]: I0929 12:54:11.279423 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:12 crc kubenswrapper[4611]: I0929 12:54:12.007392 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mt5g9"]
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.249027 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mt5g9" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="registry-server" containerID="cri-o://7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23" gracePeriod=2
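The teardown sequence above always runs in the same shape: the volume reconciler notices mounted volumes whose pod is gone, starts UnmountVolume, reports "Volume detached" once TearDown succeeds, and only then can the orphaned pod directory be removed. A toy model of that desired-state/actual-state loop, with illustrative names rather than kubelet types:

package main

import "fmt"

// volume identifies a mount the way the reconciler entries above do:
// by volume name and owning pod UID.
type volume struct{ name, pod string }

// reconcile unmounts every volume that is mounted but no longer desired,
// returning the volumes that remain mounted.
func reconcile(desired map[volume]bool, mounted []volume) []volume {
	var still []volume
	for _, v := range mounted {
		if desired[v] {
			still = append(still, v)
			continue
		}
		fmt.Printf("UnmountVolume started for %q (pod %s)\n", v.name, v.pod)
		// ...TearDown would run here...
		fmt.Printf("Volume detached for %q\n", v.name)
	}
	return still
}

func main() {
	mounted := []volume{
		{"utilities", "a695578e"}, {"catalog-content", "a695578e"},
	}
	// Pod deleted: the desired state is empty, so everything unmounts.
	mounted = reconcile(map[volume]bool{}, mounted)
	fmt.Println(len(mounted), "volumes remain; pod dir can now be cleaned up")
}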
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.624421 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.766371 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xctmm\" (UniqueName: \"kubernetes.io/projected/43345e36-3eeb-4c14-813f-9f9372f43677-kube-api-access-xctmm\") pod \"43345e36-3eeb-4c14-813f-9f9372f43677\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") "
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.766465 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-catalog-content\") pod \"43345e36-3eeb-4c14-813f-9f9372f43677\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") "
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.766752 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-utilities\") pod \"43345e36-3eeb-4c14-813f-9f9372f43677\" (UID: \"43345e36-3eeb-4c14-813f-9f9372f43677\") "
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.767500 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-utilities" (OuterVolumeSpecName: "utilities") pod "43345e36-3eeb-4c14-813f-9f9372f43677" (UID: "43345e36-3eeb-4c14-813f-9f9372f43677"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.771474 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43345e36-3eeb-4c14-813f-9f9372f43677-kube-api-access-xctmm" (OuterVolumeSpecName: "kube-api-access-xctmm") pod "43345e36-3eeb-4c14-813f-9f9372f43677" (UID: "43345e36-3eeb-4c14-813f-9f9372f43677"). InnerVolumeSpecName "kube-api-access-xctmm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.819463 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43345e36-3eeb-4c14-813f-9f9372f43677" (UID: "43345e36-3eeb-4c14-813f-9f9372f43677"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.868262 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.868302 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xctmm\" (UniqueName: \"kubernetes.io/projected/43345e36-3eeb-4c14-813f-9f9372f43677-kube-api-access-xctmm\") on node \"crc\" DevicePath \"\""
Sep 29 12:54:13 crc kubenswrapper[4611]: I0929 12:54:13.868316 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345e36-3eeb-4c14-813f-9f9372f43677-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.255898 4611 generic.go:334] "Generic (PLEG): container finished" podID="43345e36-3eeb-4c14-813f-9f9372f43677" containerID="7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23" exitCode=0
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.255951 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mt5g9"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.255971 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerDied","Data":"7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23"}
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.256364 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mt5g9" event={"ID":"43345e36-3eeb-4c14-813f-9f9372f43677","Type":"ContainerDied","Data":"c49759dc1fa8783bdea73388c354d7712c6c1da4155db7ec63a5d3a4d49f3ed7"}
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.256402 4611 scope.go:117] "RemoveContainer" containerID="7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.284763 4611 scope.go:117] "RemoveContainer" containerID="f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.289536 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mt5g9"]
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.299144 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mt5g9"]
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.304850 4611 scope.go:117] "RemoveContainer" containerID="9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.319881 4611 scope.go:117] "RemoveContainer" containerID="7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23"
Sep 29 12:54:14 crc kubenswrapper[4611]: E0929 12:54:14.320723 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23\": container with ID starting with 7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23 not found: ID does not exist" containerID="7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.320752 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23"} err="failed to get container status \"7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23\": rpc error: code = NotFound desc = could not find container \"7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23\": container with ID starting with 7aa8180c21848aff45fa765806b0f3051c86a03f4e51e85dac5ec74757b63e23 not found: ID does not exist"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.320771 4611 scope.go:117] "RemoveContainer" containerID="f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd"
Sep 29 12:54:14 crc kubenswrapper[4611]: E0929 12:54:14.321140 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd\": container with ID starting with f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd not found: ID does not exist" containerID="f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.321163 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd"} err="failed to get container status \"f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd\": rpc error: code = NotFound desc = could not find container \"f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd\": container with ID starting with f6b451ace504cd78cfb7e000dcb46ddcd7056d6cb8fe8e6f3d9bc3cf09b14bfd not found: ID does not exist"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.321177 4611 scope.go:117] "RemoveContainer" containerID="9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672"
Sep 29 12:54:14 crc kubenswrapper[4611]: E0929 12:54:14.321443 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672\": container with ID starting with 9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672 not found: ID does not exist" containerID="9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672"
Sep 29 12:54:14 crc kubenswrapper[4611]: I0929 12:54:14.321490 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672"} err="failed to get container status \"9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672\": rpc error: code = NotFound desc = could not find container \"9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672\": container with ID starting with 9d9807d0b5cd71850573f311f8e68b4fdde3a305b1c77f993d23bdf5dc439672 not found: ID does not exist"
Sep 29 12:54:15 crc kubenswrapper[4611]: I0929 12:54:15.744458 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" path="/var/lib/kubelet/pods/43345e36-3eeb-4c14-813f-9f9372f43677/volumes"
Sep 29 12:54:20 crc kubenswrapper[4611]: I0929 12:54:20.701114 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-857688c5cb-tvmnh"
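Twice in the entries above (redhat-marketplace-rbwfq and certified-operators-mt5g9) the readiness probe is logged with an empty status while the startup probe is still unhealthy, and flips to "ready" only after startup reports "started". That matches the documented probe semantics: readiness and liveness probing are held back until the startup probe has succeeded once. A small sketch of that gating, with field names of our own choosing:

package main

import "fmt"

// podProbes models only the one bit of state that matters here: whether
// the startup probe has succeeded at least once.
type podProbes struct {
	started bool
}

// readiness returns the reported readiness status; while startup is
// pending it is withheld, which the kubelet logs as status="".
func (p *podProbes) readiness(probeOK bool) string {
	if !p.started {
		return "" // readiness deferred until startup completes
	}
	if probeOK {
		return "ready"
	}
	return "not ready"
}

func main() {
	p := &podProbes{}
	fmt.Printf("readiness=%q\n", p.readiness(true)) // "" -- startup still pending
	p.started = true
	fmt.Printf("readiness=%q\n", p.readiness(true)) // "ready"
}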
"SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg"] Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.402392 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="registry-server" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402415 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="registry-server" Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.402432 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="extract-content" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402440 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="extract-content" Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.402450 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="extract-content" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402458 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="extract-content" Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.402478 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="extract-utilities" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402486 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="extract-utilities" Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.402501 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="registry-server" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402508 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="registry-server" Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.402523 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="extract-utilities" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402532 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="extract-utilities" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402685 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a695578e-2a34-4ae7-9619-386a2dc5296b" containerName="registry-server" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.402709 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="43345e36-3eeb-4c14-813f-9f9372f43677" containerName="registry-server" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.403198 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.406074 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.409721 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-9fr92" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.420777 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2bsvt"] Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.426945 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg"] Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.427068 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.429058 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.429864 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.455663 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ca8494f-d4cf-483e-ac41-ebdaa4f585a2-cert\") pod \"frr-k8s-webhook-server-5478bdb765-8zvrg\" (UID: \"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.455980 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpjgb\" (UniqueName: \"kubernetes.io/projected/1ca8494f-d4cf-483e-ac41-ebdaa4f585a2-kube-api-access-tpjgb\") pod \"frr-k8s-webhook-server-5478bdb765-8zvrg\" (UID: \"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.540952 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-gfxw6"] Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.541825 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-gfxw6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.543915 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.543971 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.544088 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.545693 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-vf9qx" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.556757 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-sockets\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.556799 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0fdd196-cac0-4d07-93f0-d33fac04af37-metrics-certs\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.556853 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-metrics\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.556910 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-conf\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.556946 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpjgb\" (UniqueName: \"kubernetes.io/projected/1ca8494f-d4cf-483e-ac41-ebdaa4f585a2-kube-api-access-tpjgb\") pod \"frr-k8s-webhook-server-5478bdb765-8zvrg\" (UID: \"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.556968 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf7rj\" (UniqueName: \"kubernetes.io/projected/a0fdd196-cac0-4d07-93f0-d33fac04af37-kube-api-access-gf7rj\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.557039 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-reloader\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.557089 4611 reconciler_common.go:218] 
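The reflector.go:368 "Caches populated" entries above mark the moment a client-go reflector finishes its initial LIST/WATCH for a Secret or ConfigMap the new pods reference. A minimal informer that watches Secrets in the same namespace and waits for that same sync point (a sketch; assumes in-cluster config and is not the kubelet's internal wiring):

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Watch Secrets in metallb-system, the namespace whose reflector
	// cache population is logged above.
	factory := informers.NewSharedInformerFactoryWithOptions(
		client, 10*time.Minute, informers.WithNamespace("metallb-system"))
	inf := factory.Core().V1().Secrets().Informer()
	inf.AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			s := obj.(*corev1.Secret)
			fmt.Println("cached secret:", s.Name)
		},
	})

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	// Returns once the initial LIST/WATCH has filled the cache -- the
	// point at which the kubelet logs "Caches populated".
	cache.WaitForCacheSync(stop, inf.HasSynced)
}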
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ca8494f-d4cf-483e-ac41-ebdaa4f585a2-cert\") pod \"frr-k8s-webhook-server-5478bdb765-8zvrg\" (UID: \"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.557111 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-startup\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.562905 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ca8494f-d4cf-483e-ac41-ebdaa4f585a2-cert\") pod \"frr-k8s-webhook-server-5478bdb765-8zvrg\" (UID: \"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.566027 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-777x6"] Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.566999 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.582286 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.601395 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-777x6"] Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.606285 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpjgb\" (UniqueName: \"kubernetes.io/projected/1ca8494f-d4cf-483e-ac41-ebdaa4f585a2-kube-api-access-tpjgb\") pod \"frr-k8s-webhook-server-5478bdb765-8zvrg\" (UID: \"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659099 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-conf\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659178 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e99966f9-0316-4285-aab7-deb192348231-cert\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659212 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659294 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: 
\"kubernetes.io/configmap/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metallb-excludel2\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659342 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf7rj\" (UniqueName: \"kubernetes.io/projected/a0fdd196-cac0-4d07-93f0-d33fac04af37-kube-api-access-gf7rj\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659369 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e99966f9-0316-4285-aab7-deb192348231-metrics-certs\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659396 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-reloader\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659434 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-startup\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659459 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-sockets\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659478 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzl9l\" (UniqueName: \"kubernetes.io/projected/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-kube-api-access-fzl9l\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659503 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0fdd196-cac0-4d07-93f0-d33fac04af37-metrics-certs\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659524 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metrics-certs\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659556 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-metrics\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc 
kubenswrapper[4611]: I0929 12:54:21.659582 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76vqt\" (UniqueName: \"kubernetes.io/projected/e99966f9-0316-4285-aab7-deb192348231-kube-api-access-76vqt\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.659864 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-reloader\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.660169 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-sockets\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.660370 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-metrics\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.660502 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-startup\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.661617 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0fdd196-cac0-4d07-93f0-d33fac04af37-frr-conf\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.676750 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf7rj\" (UniqueName: \"kubernetes.io/projected/a0fdd196-cac0-4d07-93f0-d33fac04af37-kube-api-access-gf7rj\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.677122 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0fdd196-cac0-4d07-93f0-d33fac04af37-metrics-certs\") pod \"frr-k8s-2bsvt\" (UID: \"a0fdd196-cac0-4d07-93f0-d33fac04af37\") " pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.724042 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.744756 4611 util.go:30] "No sandbox for pod can be found. 
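The UniqueName prefixes in the mount entries above map one-to-one onto volume plugins: kubernetes.io/empty-dir, kubernetes.io/configmap, kubernetes.io/secret, and kubernetes.io/projected for the generated kube-api-access-* token volume. Rebuilt as corev1 types for the frr-k8s-2bsvt volume set (illustrative only; the real spec comes from the MetalLB operator, and the secret name backing metrics-certs is inferred from the cache entries above):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// frrVolumes rebuilds the declared volume set of frr-k8s-2bsvt to show
// which plugin each UniqueName prefix corresponds to.
func frrVolumes() []corev1.Volume {
	return []corev1.Volume{
		// kubernetes.io/empty-dir
		{Name: "frr-sockets", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		{Name: "frr-conf", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		{Name: "reloader", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		{Name: "metrics", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		// kubernetes.io/configmap
		{Name: "frr-startup", VolumeSource: corev1.VolumeSource{ConfigMap: &corev1.ConfigMapVolumeSource{
			LocalObjectReference: corev1.LocalObjectReference{Name: "frr-startup"},
		}}},
		// kubernetes.io/secret (backing secret name assumed from the
		// "frr-k8s-certs-secret" reflector entry above)
		{Name: "metrics-certs", VolumeSource: corev1.VolumeSource{Secret: &corev1.SecretVolumeSource{
			SecretName: "frr-k8s-certs-secret",
		}}},
		// kube-api-access-gf7rj is a projected service-account token
		// volume injected by the API server, not declared in the spec.
	}
}

func main() { fmt.Println(len(frrVolumes()), "declared volumes") }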
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.744756 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2bsvt"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.760537 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzl9l\" (UniqueName: \"kubernetes.io/projected/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-kube-api-access-fzl9l\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.760764 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metrics-certs\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.760877 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76vqt\" (UniqueName: \"kubernetes.io/projected/e99966f9-0316-4285-aab7-deb192348231-kube-api-access-76vqt\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.760978 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e99966f9-0316-4285-aab7-deb192348231-cert\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.761091 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6"
Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.760908 4611 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.761290 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metrics-certs podName:f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5 nodeName:}" failed. No retries permitted until 2025-09-29 12:54:22.261254044 +0000 UTC m=+849.152773720 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metrics-certs") pod "speaker-gfxw6" (UID: "f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5") : secret "speaker-certs-secret" not found
Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.761313 4611 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Sep 29 12:54:21 crc kubenswrapper[4611]: E0929 12:54:21.761361 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist podName:f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5 nodeName:}" failed. No retries permitted until 2025-09-29 12:54:22.261346106 +0000 UTC m=+849.152865712 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist") pod "speaker-gfxw6" (UID: "f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5") : secret "metallb-memberlist" not found
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.761191 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metallb-excludel2\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.761437 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e99966f9-0316-4285-aab7-deb192348231-metrics-certs\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.762024 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metallb-excludel2\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.764879 4611 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.765810 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e99966f9-0316-4285-aab7-deb192348231-metrics-certs\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.778756 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzl9l\" (UniqueName: \"kubernetes.io/projected/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-kube-api-access-fzl9l\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.779951 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e99966f9-0316-4285-aab7-deb192348231-cert\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6"
Sep 29 12:54:21 crc kubenswrapper[4611]: I0929 12:54:21.782545 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76vqt\" (UniqueName: \"kubernetes.io/projected/e99966f9-0316-4285-aab7-deb192348231-kube-api-access-76vqt\") pod \"controller-5d688f5ffc-777x6\" (UID: \"e99966f9-0316-4285-aab7-deb192348231\") " pod="metallb-system/controller-5d688f5ffc-777x6"
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.208678 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg"] Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.267918 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.268018 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metrics-certs\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:22 crc kubenswrapper[4611]: E0929 12:54:22.268864 4611 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 12:54:22 crc kubenswrapper[4611]: E0929 12:54:22.269303 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist podName:f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5 nodeName:}" failed. No retries permitted until 2025-09-29 12:54:23.269279659 +0000 UTC m=+850.160799315 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist") pod "speaker-gfxw6" (UID: "f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5") : secret "metallb-memberlist" not found Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.275722 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-metrics-certs\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.310430 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"c7b8ad4eac67ea96f056065b61340f11d14d59e70ddabe93e3feaae0c8aa8d4d"} Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.312277 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" event={"ID":"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2","Type":"ContainerStarted","Data":"30e6107359ed5929e66bcbd39a07521ca5cfea13bea9ce9fa8841305a5b49870"} Sep 29 12:54:22 crc kubenswrapper[4611]: I0929 12:54:22.327041 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-777x6"] Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.278709 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist\") pod \"speaker-gfxw6\" (UID: \"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.287506 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5-memberlist\") pod \"speaker-gfxw6\" (UID: 
\"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5\") " pod="metallb-system/speaker-gfxw6" Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.329848 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-777x6" event={"ID":"e99966f9-0316-4285-aab7-deb192348231","Type":"ContainerStarted","Data":"00c0584275ed9d959b2a9f269fa5f02393bb3639fa5124394906a8846b3459a4"} Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.329901 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-777x6" event={"ID":"e99966f9-0316-4285-aab7-deb192348231","Type":"ContainerStarted","Data":"0a3a723f3013b2f7674a488d7ff2c107a84501118f0a531285fed3bbbd7653d9"} Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.329914 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-777x6" event={"ID":"e99966f9-0316-4285-aab7-deb192348231","Type":"ContainerStarted","Data":"4ebc411118ee9f030229ece66858bfbf07f9cd0a6306d97ce28986d693064e14"} Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.330269 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.356463 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-gfxw6" Sep 29 12:54:23 crc kubenswrapper[4611]: I0929 12:54:23.357390 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-777x6" podStartSLOduration=2.357369606 podStartE2EDuration="2.357369606s" podCreationTimestamp="2025-09-29 12:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:54:23.35198471 +0000 UTC m=+850.243504316" watchObservedRunningTime="2025-09-29 12:54:23.357369606 +0000 UTC m=+850.248889212" Sep 29 12:54:24 crc kubenswrapper[4611]: I0929 12:54:24.349642 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gfxw6" event={"ID":"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5","Type":"ContainerStarted","Data":"7a39ff4d62bdb5d91261eff43d996f64fb3d3a107034b31053e8e9a33dbc631b"} Sep 29 12:54:24 crc kubenswrapper[4611]: I0929 12:54:24.349963 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gfxw6" event={"ID":"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5","Type":"ContainerStarted","Data":"117b4f042e711771d51b0e3923721b454a94aef123d72e621bb3c1d98ed16074"} Sep 29 12:54:24 crc kubenswrapper[4611]: I0929 12:54:24.349976 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gfxw6" event={"ID":"f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5","Type":"ContainerStarted","Data":"d800a36a414003386388290e225b8abf455ceda343c2fc1bdccd40a4908b38c3"} Sep 29 12:54:24 crc kubenswrapper[4611]: I0929 12:54:24.350239 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-gfxw6" Sep 29 12:54:24 crc kubenswrapper[4611]: I0929 12:54:24.374562 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-gfxw6" podStartSLOduration=3.374548368 podStartE2EDuration="3.374548368s" podCreationTimestamp="2025-09-29 12:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:54:24.372085087 +0000 UTC m=+851.263604683" watchObservedRunningTime="2025-09-29 
12:54:24.374548368 +0000 UTC m=+851.266067974" Sep 29 12:54:30 crc kubenswrapper[4611]: I0929 12:54:30.398597 4611 generic.go:334] "Generic (PLEG): container finished" podID="a0fdd196-cac0-4d07-93f0-d33fac04af37" containerID="795474ce088a912100cc3dca254e01194e34b3f76033efe97406935d7e938897" exitCode=0 Sep 29 12:54:30 crc kubenswrapper[4611]: I0929 12:54:30.398699 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerDied","Data":"795474ce088a912100cc3dca254e01194e34b3f76033efe97406935d7e938897"} Sep 29 12:54:30 crc kubenswrapper[4611]: I0929 12:54:30.400764 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" event={"ID":"1ca8494f-d4cf-483e-ac41-ebdaa4f585a2","Type":"ContainerStarted","Data":"867d30f6ccd2c7b85a75a8e908b76c54a06fa4c8d06db7574fe0fab877552b83"} Sep 29 12:54:30 crc kubenswrapper[4611]: I0929 12:54:30.400940 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:30 crc kubenswrapper[4611]: I0929 12:54:30.454765 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" podStartSLOduration=1.509167495 podStartE2EDuration="9.454744663s" podCreationTimestamp="2025-09-29 12:54:21 +0000 UTC" firstStartedPulling="2025-09-29 12:54:22.217253069 +0000 UTC m=+849.108772675" lastFinishedPulling="2025-09-29 12:54:30.162830237 +0000 UTC m=+857.054349843" observedRunningTime="2025-09-29 12:54:30.451910791 +0000 UTC m=+857.343430407" watchObservedRunningTime="2025-09-29 12:54:30.454744663 +0000 UTC m=+857.346264279" Sep 29 12:54:31 crc kubenswrapper[4611]: I0929 12:54:31.407975 4611 generic.go:334] "Generic (PLEG): container finished" podID="a0fdd196-cac0-4d07-93f0-d33fac04af37" containerID="3bbd95ed4753e9d41c7a1f3d56cdd6c22f8c823420b5f35ee8a1b2bacc119e78" exitCode=0 Sep 29 12:54:31 crc kubenswrapper[4611]: I0929 12:54:31.408021 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerDied","Data":"3bbd95ed4753e9d41c7a1f3d56cdd6c22f8c823420b5f35ee8a1b2bacc119e78"} Sep 29 12:54:32 crc kubenswrapper[4611]: I0929 12:54:32.417846 4611 generic.go:334] "Generic (PLEG): container finished" podID="a0fdd196-cac0-4d07-93f0-d33fac04af37" containerID="aecac23ef837518564536679d30600dc66e17ad698ab97632c43abd7efeb7823" exitCode=0 Sep 29 12:54:32 crc kubenswrapper[4611]: I0929 12:54:32.417887 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerDied","Data":"aecac23ef837518564536679d30600dc66e17ad698ab97632c43abd7efeb7823"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.360689 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-gfxw6" Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.430069 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"d8d67aae9101d7694cbb0bc63c87dbf71eb5c5c0939b2d529ba18639fe6d06af"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.430107 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" 
event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"a8de55d2a1d86c9839da3b9826ad956fa10f5f4b5b4267e5597d8ce72878291d"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.430116 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"5c184466fab91ec958bf12abfcf253d7d7cf8389bd3f6ed8088d48e38049ddc0"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.430155 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"9b9fad42eb6290d4d63c09cb27069dd5b7b41993076c911fff5426a449c95fc4"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.430163 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"c922f998371e1d9a36e6eec6403ea526d0b93d47fcbcf126ea0692e17793bc14"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.430171 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2bsvt" event={"ID":"a0fdd196-cac0-4d07-93f0-d33fac04af37","Type":"ContainerStarted","Data":"0639a66f8300952a43245ec32e88b0a3d303eee1b923b25abb74d7c600ff9720"} Sep 29 12:54:33 crc kubenswrapper[4611]: I0929 12:54:33.431064 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.341940 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-2bsvt" podStartSLOduration=7.072065316 podStartE2EDuration="15.341919273s" podCreationTimestamp="2025-09-29 12:54:21 +0000 UTC" firstStartedPulling="2025-09-29 12:54:21.870512773 +0000 UTC m=+848.762032379" lastFinishedPulling="2025-09-29 12:54:30.14036674 +0000 UTC m=+857.031886336" observedRunningTime="2025-09-29 12:54:33.469576831 +0000 UTC m=+860.361096437" watchObservedRunningTime="2025-09-29 12:54:36.341919273 +0000 UTC m=+863.233438879" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.344927 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-c5thm"] Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.345761 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.349184 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.349266 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-v65kj" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.349313 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.377580 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c5thm"] Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.467110 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpnw\" (UniqueName: \"kubernetes.io/projected/15bd6441-f40f-43d0-90cb-b0050d4d1651-kube-api-access-xrpnw\") pod \"openstack-operator-index-c5thm\" (UID: \"15bd6441-f40f-43d0-90cb-b0050d4d1651\") " pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.568543 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpnw\" (UniqueName: \"kubernetes.io/projected/15bd6441-f40f-43d0-90cb-b0050d4d1651-kube-api-access-xrpnw\") pod \"openstack-operator-index-c5thm\" (UID: \"15bd6441-f40f-43d0-90cb-b0050d4d1651\") " pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.587522 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpnw\" (UniqueName: \"kubernetes.io/projected/15bd6441-f40f-43d0-90cb-b0050d4d1651-kube-api-access-xrpnw\") pod \"openstack-operator-index-c5thm\" (UID: \"15bd6441-f40f-43d0-90cb-b0050d4d1651\") " pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.663597 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.746818 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:36 crc kubenswrapper[4611]: I0929 12:54:36.804918 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:37 crc kubenswrapper[4611]: I0929 12:54:37.112477 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c5thm"] Sep 29 12:54:37 crc kubenswrapper[4611]: I0929 12:54:37.456792 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c5thm" event={"ID":"15bd6441-f40f-43d0-90cb-b0050d4d1651","Type":"ContainerStarted","Data":"b61d28ac05afbd206fc3b2782d4eb4eda956355ff7b93c648b8d495856b3e487"} Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.120768 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-c5thm"] Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.726513 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-nbhph"] Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.728211 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.755089 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nbhph"] Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.816813 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvspx\" (UniqueName: \"kubernetes.io/projected/503e11a8-b465-4721-a817-fd82011936eb-kube-api-access-jvspx\") pod \"openstack-operator-index-nbhph\" (UID: \"503e11a8-b465-4721-a817-fd82011936eb\") " pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.918371 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvspx\" (UniqueName: \"kubernetes.io/projected/503e11a8-b465-4721-a817-fd82011936eb-kube-api-access-jvspx\") pod \"openstack-operator-index-nbhph\" (UID: \"503e11a8-b465-4721-a817-fd82011936eb\") " pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:39 crc kubenswrapper[4611]: I0929 12:54:39.938201 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvspx\" (UniqueName: \"kubernetes.io/projected/503e11a8-b465-4721-a817-fd82011936eb-kube-api-access-jvspx\") pod \"openstack-operator-index-nbhph\" (UID: \"503e11a8-b465-4721-a817-fd82011936eb\") " pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.052658 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.480600 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c5thm" event={"ID":"15bd6441-f40f-43d0-90cb-b0050d4d1651","Type":"ContainerStarted","Data":"d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd"} Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.480862 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-c5thm" podUID="15bd6441-f40f-43d0-90cb-b0050d4d1651" containerName="registry-server" containerID="cri-o://d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd" gracePeriod=2 Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.574097 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-c5thm" podStartSLOduration=1.4744697310000001 podStartE2EDuration="4.574076794s" podCreationTimestamp="2025-09-29 12:54:36 +0000 UTC" firstStartedPulling="2025-09-29 12:54:37.129307401 +0000 UTC m=+864.020827007" lastFinishedPulling="2025-09-29 12:54:40.228914464 +0000 UTC m=+867.120434070" observedRunningTime="2025-09-29 12:54:40.497731753 +0000 UTC m=+867.389251359" watchObservedRunningTime="2025-09-29 12:54:40.574076794 +0000 UTC m=+867.465596410" Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.575753 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nbhph"] Sep 29 12:54:40 crc kubenswrapper[4611]: W0929 12:54:40.580508 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod503e11a8_b465_4721_a817_fd82011936eb.slice/crio-8674d82da6dff1831799ff876135b17878ad9529fae5928607c6dcf60f5f2506 WatchSource:0}: Error finding container 8674d82da6dff1831799ff876135b17878ad9529fae5928607c6dcf60f5f2506: Status 404 returned error can't find the container with id 8674d82da6dff1831799ff876135b17878ad9529fae5928607c6dcf60f5f2506 Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.839849 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.937871 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrpnw\" (UniqueName: \"kubernetes.io/projected/15bd6441-f40f-43d0-90cb-b0050d4d1651-kube-api-access-xrpnw\") pod \"15bd6441-f40f-43d0-90cb-b0050d4d1651\" (UID: \"15bd6441-f40f-43d0-90cb-b0050d4d1651\") " Sep 29 12:54:40 crc kubenswrapper[4611]: I0929 12:54:40.945925 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15bd6441-f40f-43d0-90cb-b0050d4d1651-kube-api-access-xrpnw" (OuterVolumeSpecName: "kube-api-access-xrpnw") pod "15bd6441-f40f-43d0-90cb-b0050d4d1651" (UID: "15bd6441-f40f-43d0-90cb-b0050d4d1651"). InnerVolumeSpecName "kube-api-access-xrpnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.039407 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrpnw\" (UniqueName: \"kubernetes.io/projected/15bd6441-f40f-43d0-90cb-b0050d4d1651-kube-api-access-xrpnw\") on node \"crc\" DevicePath \"\"" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.487036 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nbhph" event={"ID":"503e11a8-b465-4721-a817-fd82011936eb","Type":"ContainerStarted","Data":"d2066f5c72a1b0b15ee573bf247fd93263b8fdd8ab7ffb82a1c85edc53c80b0d"} Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.487343 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nbhph" event={"ID":"503e11a8-b465-4721-a817-fd82011936eb","Type":"ContainerStarted","Data":"8674d82da6dff1831799ff876135b17878ad9529fae5928607c6dcf60f5f2506"} Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.488498 4611 generic.go:334] "Generic (PLEG): container finished" podID="15bd6441-f40f-43d0-90cb-b0050d4d1651" containerID="d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd" exitCode=0 Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.488530 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c5thm" event={"ID":"15bd6441-f40f-43d0-90cb-b0050d4d1651","Type":"ContainerDied","Data":"d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd"} Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.488532 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-c5thm" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.488553 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c5thm" event={"ID":"15bd6441-f40f-43d0-90cb-b0050d4d1651","Type":"ContainerDied","Data":"b61d28ac05afbd206fc3b2782d4eb4eda956355ff7b93c648b8d495856b3e487"} Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.488570 4611 scope.go:117] "RemoveContainer" containerID="d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.506371 4611 scope.go:117] "RemoveContainer" containerID="d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd" Sep 29 12:54:41 crc kubenswrapper[4611]: E0929 12:54:41.506878 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd\": container with ID starting with d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd not found: ID does not exist" containerID="d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.506907 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd"} err="failed to get container status \"d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd\": rpc error: code = NotFound desc = could not find container \"d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd\": container with ID starting with d93fe06d1cdff319b3bb9deb390dac0c8f6d15b15e22e91175a8d612c66bc1dd not found: ID does not exist" Sep 29 12:54:41 crc kubenswrapper[4611]: 
I0929 12:54:41.511239 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-nbhph" podStartSLOduration=2.46960767 podStartE2EDuration="2.51122226s" podCreationTimestamp="2025-09-29 12:54:39 +0000 UTC" firstStartedPulling="2025-09-29 12:54:40.583763853 +0000 UTC m=+867.475283449" lastFinishedPulling="2025-09-29 12:54:40.625378433 +0000 UTC m=+867.516898039" observedRunningTime="2025-09-29 12:54:41.505100263 +0000 UTC m=+868.396619869" watchObservedRunningTime="2025-09-29 12:54:41.51122226 +0000 UTC m=+868.402741856" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.526186 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-c5thm"] Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.529554 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-c5thm"] Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.729163 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-8zvrg" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.744852 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15bd6441-f40f-43d0-90cb-b0050d4d1651" path="/var/lib/kubelet/pods/15bd6441-f40f-43d0-90cb-b0050d4d1651/volumes" Sep 29 12:54:41 crc kubenswrapper[4611]: I0929 12:54:41.908058 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-777x6" Sep 29 12:54:50 crc kubenswrapper[4611]: I0929 12:54:50.053222 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:50 crc kubenswrapper[4611]: I0929 12:54:50.053668 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:50 crc kubenswrapper[4611]: I0929 12:54:50.080659 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:50 crc kubenswrapper[4611]: I0929 12:54:50.561968 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-nbhph" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.749235 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2bsvt" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.963091 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh"] Sep 29 12:54:51 crc kubenswrapper[4611]: E0929 12:54:51.963389 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15bd6441-f40f-43d0-90cb-b0050d4d1651" containerName="registry-server" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.963409 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="15bd6441-f40f-43d0-90cb-b0050d4d1651" containerName="registry-server" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.963546 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="15bd6441-f40f-43d0-90cb-b0050d4d1651" containerName="registry-server" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.964349 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.966999 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-9xwgq" Sep 29 12:54:51 crc kubenswrapper[4611]: I0929 12:54:51.977533 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh"] Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.083776 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-util\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.083959 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-bundle\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.084021 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-989pd\" (UniqueName: \"kubernetes.io/projected/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-kube-api-access-989pd\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.185159 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-util\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.185216 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-bundle\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.185247 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-989pd\" (UniqueName: \"kubernetes.io/projected/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-kube-api-access-989pd\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.185640 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-util\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.185722 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-bundle\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.210602 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-989pd\" (UniqueName: \"kubernetes.io/projected/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-kube-api-access-989pd\") pod \"3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.288997 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:52 crc kubenswrapper[4611]: I0929 12:54:52.664606 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh"] Sep 29 12:54:53 crc kubenswrapper[4611]: I0929 12:54:53.561031 4611 generic.go:334] "Generic (PLEG): container finished" podID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerID="97628cb92c53524cbc9e2d688d66a756acb80624714002eb6d4b053aa7338df7" exitCode=0 Sep 29 12:54:53 crc kubenswrapper[4611]: I0929 12:54:53.561131 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" event={"ID":"83608d6a-9e03-49d5-8e0e-762b5dacbf7b","Type":"ContainerDied","Data":"97628cb92c53524cbc9e2d688d66a756acb80624714002eb6d4b053aa7338df7"} Sep 29 12:54:53 crc kubenswrapper[4611]: I0929 12:54:53.561322 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" event={"ID":"83608d6a-9e03-49d5-8e0e-762b5dacbf7b","Type":"ContainerStarted","Data":"d6260e514d0d5ea95d05064c1a754c1c34bfb89e7fd6649e1249660667de7622"} Sep 29 12:54:54 crc kubenswrapper[4611]: I0929 12:54:54.568991 4611 generic.go:334] "Generic (PLEG): container finished" podID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerID="b5eed88a779db9caf02b4dd4820947f8a3a625ca6de85c5d3179fe9a40aff243" exitCode=0 Sep 29 12:54:54 crc kubenswrapper[4611]: I0929 12:54:54.569285 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" event={"ID":"83608d6a-9e03-49d5-8e0e-762b5dacbf7b","Type":"ContainerDied","Data":"b5eed88a779db9caf02b4dd4820947f8a3a625ca6de85c5d3179fe9a40aff243"} Sep 29 12:54:55 crc kubenswrapper[4611]: I0929 12:54:55.577975 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" 
event={"ID":"83608d6a-9e03-49d5-8e0e-762b5dacbf7b","Type":"ContainerDied","Data":"f66820d59c3e82d300deaf831ab219711fdf4c5b81d916e0f36b3f6700dbac86"} Sep 29 12:54:55 crc kubenswrapper[4611]: I0929 12:54:55.577935 4611 generic.go:334] "Generic (PLEG): container finished" podID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerID="f66820d59c3e82d300deaf831ab219711fdf4c5b81d916e0f36b3f6700dbac86" exitCode=0 Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.827771 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.945331 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-util\") pod \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.945447 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-bundle\") pod \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.945519 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-989pd\" (UniqueName: \"kubernetes.io/projected/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-kube-api-access-989pd\") pod \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\" (UID: \"83608d6a-9e03-49d5-8e0e-762b5dacbf7b\") " Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.946155 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-bundle" (OuterVolumeSpecName: "bundle") pod "83608d6a-9e03-49d5-8e0e-762b5dacbf7b" (UID: "83608d6a-9e03-49d5-8e0e-762b5dacbf7b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.953534 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-kube-api-access-989pd" (OuterVolumeSpecName: "kube-api-access-989pd") pod "83608d6a-9e03-49d5-8e0e-762b5dacbf7b" (UID: "83608d6a-9e03-49d5-8e0e-762b5dacbf7b"). InnerVolumeSpecName "kube-api-access-989pd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:54:56 crc kubenswrapper[4611]: I0929 12:54:56.960459 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-util" (OuterVolumeSpecName: "util") pod "83608d6a-9e03-49d5-8e0e-762b5dacbf7b" (UID: "83608d6a-9e03-49d5-8e0e-762b5dacbf7b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:54:57 crc kubenswrapper[4611]: I0929 12:54:57.046662 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-989pd\" (UniqueName: \"kubernetes.io/projected/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-kube-api-access-989pd\") on node \"crc\" DevicePath \"\"" Sep 29 12:54:57 crc kubenswrapper[4611]: I0929 12:54:57.046711 4611 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-util\") on node \"crc\" DevicePath \"\"" Sep 29 12:54:57 crc kubenswrapper[4611]: I0929 12:54:57.046724 4611 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83608d6a-9e03-49d5-8e0e-762b5dacbf7b-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:54:57 crc kubenswrapper[4611]: I0929 12:54:57.593307 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" event={"ID":"83608d6a-9e03-49d5-8e0e-762b5dacbf7b","Type":"ContainerDied","Data":"d6260e514d0d5ea95d05064c1a754c1c34bfb89e7fd6649e1249660667de7622"} Sep 29 12:54:57 crc kubenswrapper[4611]: I0929 12:54:57.593350 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6260e514d0d5ea95d05064c1a754c1c34bfb89e7fd6649e1249660667de7622" Sep 29 12:54:57 crc kubenswrapper[4611]: I0929 12:54:57.593381 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.704432 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd"] Sep 29 12:55:04 crc kubenswrapper[4611]: E0929 12:55:04.705274 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="util" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.705292 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="util" Sep 29 12:55:04 crc kubenswrapper[4611]: E0929 12:55:04.705305 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="extract" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.705315 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="extract" Sep 29 12:55:04 crc kubenswrapper[4611]: E0929 12:55:04.705341 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="pull" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.705368 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="pull" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.705506 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="83608d6a-9e03-49d5-8e0e-762b5dacbf7b" containerName="extract" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.706350 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.710258 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-r47k8" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.752249 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd"] Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.840212 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj7rp\" (UniqueName: \"kubernetes.io/projected/2db5cdab-8171-4cb8-9bb1-065c1d194657-kube-api-access-vj7rp\") pod \"openstack-operator-controller-operator-6f887f99fd-2t2zd\" (UID: \"2db5cdab-8171-4cb8-9bb1-065c1d194657\") " pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.941140 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj7rp\" (UniqueName: \"kubernetes.io/projected/2db5cdab-8171-4cb8-9bb1-065c1d194657-kube-api-access-vj7rp\") pod \"openstack-operator-controller-operator-6f887f99fd-2t2zd\" (UID: \"2db5cdab-8171-4cb8-9bb1-065c1d194657\") " pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:04 crc kubenswrapper[4611]: I0929 12:55:04.970565 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj7rp\" (UniqueName: \"kubernetes.io/projected/2db5cdab-8171-4cb8-9bb1-065c1d194657-kube-api-access-vj7rp\") pod \"openstack-operator-controller-operator-6f887f99fd-2t2zd\" (UID: \"2db5cdab-8171-4cb8-9bb1-065c1d194657\") " pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:05 crc kubenswrapper[4611]: I0929 12:55:05.024005 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:05 crc kubenswrapper[4611]: I0929 12:55:05.306674 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd"] Sep 29 12:55:05 crc kubenswrapper[4611]: I0929 12:55:05.643884 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" event={"ID":"2db5cdab-8171-4cb8-9bb1-065c1d194657","Type":"ContainerStarted","Data":"313430d2c03071af21d7ba38b479b173c9eeffa98e386307db90b8d5258daef8"} Sep 29 12:55:10 crc kubenswrapper[4611]: I0929 12:55:10.678186 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" event={"ID":"2db5cdab-8171-4cb8-9bb1-065c1d194657","Type":"ContainerStarted","Data":"d5f353a8bcc34949fe80972821aacfc82f5ea7347f015f1754f1a17ed8efcce9"} Sep 29 12:55:13 crc kubenswrapper[4611]: I0929 12:55:13.697405 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" event={"ID":"2db5cdab-8171-4cb8-9bb1-065c1d194657","Type":"ContainerStarted","Data":"0f90cfaa06c4daa96b73db61e545cad31fcc0d2817489565cc4b43933aada620"} Sep 29 12:55:13 crc kubenswrapper[4611]: I0929 12:55:13.697910 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:13 crc kubenswrapper[4611]: I0929 12:55:13.736372 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" podStartSLOduration=2.009669494 podStartE2EDuration="9.736354822s" podCreationTimestamp="2025-09-29 12:55:04 +0000 UTC" firstStartedPulling="2025-09-29 12:55:05.321516876 +0000 UTC m=+892.213036482" lastFinishedPulling="2025-09-29 12:55:13.048202204 +0000 UTC m=+899.939721810" observedRunningTime="2025-09-29 12:55:13.732316636 +0000 UTC m=+900.623836242" watchObservedRunningTime="2025-09-29 12:55:13.736354822 +0000 UTC m=+900.627874428" Sep 29 12:55:15 crc kubenswrapper[4611]: I0929 12:55:15.026871 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6f887f99fd-2t2zd" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.424196 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.425751 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.428815 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-m76c4" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.434091 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.435113 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.447150 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-4jdtn" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.457108 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.470326 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.471616 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.481231 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.482179 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.483687 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-qv68d" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.484049 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-74msb" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.504885 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5bgz\" (UniqueName: \"kubernetes.io/projected/4f637c90-4822-4587-922d-3dbf2240977b-kube-api-access-n5bgz\") pod \"barbican-operator-controller-manager-7fdd8988b7-527vk\" (UID: \"4f637c90-4822-4587-922d-3dbf2240977b\") " pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.539022 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.555421 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.559874 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.560887 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.566787 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-ttxz5" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.572548 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.582545 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.583650 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.588395 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-bpk4g" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.604020 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.605489 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.606038 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwgzw\" (UniqueName: \"kubernetes.io/projected/6e145eda-2d1e-414d-b09c-b78dc328af46-kube-api-access-kwgzw\") pod \"cinder-operator-controller-manager-5d9d689896-m94tx\" (UID: \"6e145eda-2d1e-414d-b09c-b78dc328af46\") " pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.606117 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mtxx\" (UniqueName: \"kubernetes.io/projected/5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b-kube-api-access-7mtxx\") pod \"glance-operator-controller-manager-7c68997f6b-thmfm\" (UID: \"5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b\") " pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.606167 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5bgz\" (UniqueName: \"kubernetes.io/projected/4f637c90-4822-4587-922d-3dbf2240977b-kube-api-access-n5bgz\") pod \"barbican-operator-controller-manager-7fdd8988b7-527vk\" (UID: \"4f637c90-4822-4587-922d-3dbf2240977b\") " pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.606192 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65mvc\" (UniqueName: \"kubernetes.io/projected/09ccaa74-5871-4408-8476-54e35b95a774-kube-api-access-65mvc\") pod \"designate-operator-controller-manager-54767c9785-ph2fv\" (UID: \"09ccaa74-5871-4408-8476-54e35b95a774\") " pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.610127 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.612654 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.613082 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-r92fh" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.633248 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.634231 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.639680 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-747665895-hdcxr"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.640707 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.642686 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hvjht" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.653060 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.656599 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-vqz6s" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.672278 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5bgz\" (UniqueName: \"kubernetes.io/projected/4f637c90-4822-4587-922d-3dbf2240977b-kube-api-access-n5bgz\") pod \"barbican-operator-controller-manager-7fdd8988b7-527vk\" (UID: \"4f637c90-4822-4587-922d-3dbf2240977b\") " pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.675730 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.681317 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-747665895-hdcxr"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.696612 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.697935 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.704130 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-cvqvt" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.711815 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mtxx\" (UniqueName: \"kubernetes.io/projected/5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b-kube-api-access-7mtxx\") pod \"glance-operator-controller-manager-7c68997f6b-thmfm\" (UID: \"5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b\") " pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.711911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.711943 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65mvc\" (UniqueName: \"kubernetes.io/projected/09ccaa74-5871-4408-8476-54e35b95a774-kube-api-access-65mvc\") pod \"designate-operator-controller-manager-54767c9785-ph2fv\" (UID: \"09ccaa74-5871-4408-8476-54e35b95a774\") " pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.712022 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9259\" (UniqueName: \"kubernetes.io/projected/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-kube-api-access-j9259\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.712071 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwgzw\" (UniqueName: \"kubernetes.io/projected/6e145eda-2d1e-414d-b09c-b78dc328af46-kube-api-access-kwgzw\") pod \"cinder-operator-controller-manager-5d9d689896-m94tx\" (UID: \"6e145eda-2d1e-414d-b09c-b78dc328af46\") " pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.712101 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxg95\" (UniqueName: \"kubernetes.io/projected/c01aecec-3545-4b0d-a81f-0440b1cc2c19-kube-api-access-hxg95\") pod \"heat-operator-controller-manager-7bb9679997-2fcdq\" (UID: \"c01aecec-3545-4b0d-a81f-0440b1cc2c19\") " pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.712163 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwtsd\" (UniqueName: \"kubernetes.io/projected/198f1c1f-a452-4e1a-be6a-7bcfbe372441-kube-api-access-zwtsd\") pod \"horizon-operator-controller-manager-769bb6b489-8mwgc\" (UID: \"198f1c1f-a452-4e1a-be6a-7bcfbe372441\") " pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" 
Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.712235 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqxq6\" (UniqueName: \"kubernetes.io/projected/78aeae10-6ff4-4ec2-9a6e-617b5b774122-kube-api-access-bqxq6\") pod \"ironic-operator-controller-manager-68ccf47b7f-dr6tt\" (UID: \"78aeae10-6ff4-4ec2-9a6e-617b5b774122\") " pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.718717 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.746936 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mtxx\" (UniqueName: \"kubernetes.io/projected/5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b-kube-api-access-7mtxx\") pod \"glance-operator-controller-manager-7c68997f6b-thmfm\" (UID: \"5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b\") " pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.749394 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.764609 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65mvc\" (UniqueName: \"kubernetes.io/projected/09ccaa74-5871-4408-8476-54e35b95a774-kube-api-access-65mvc\") pod \"designate-operator-controller-manager-54767c9785-ph2fv\" (UID: \"09ccaa74-5871-4408-8476-54e35b95a774\") " pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.778217 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwgzw\" (UniqueName: \"kubernetes.io/projected/6e145eda-2d1e-414d-b09c-b78dc328af46-kube-api-access-kwgzw\") pod \"cinder-operator-controller-manager-5d9d689896-m94tx\" (UID: \"6e145eda-2d1e-414d-b09c-b78dc328af46\") " pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.799352 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.803456 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.804361 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.805525 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.806420 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.806554 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.806855 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.808241 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.808616 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.815330 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-kgfvm" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.815834 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9259\" (UniqueName: \"kubernetes.io/projected/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-kube-api-access-j9259\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.815891 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwwkq\" (UniqueName: \"kubernetes.io/projected/6cb9eef2-e10a-4a0c-bf29-8ade30f57048-kube-api-access-cwwkq\") pod \"keystone-operator-controller-manager-747665895-hdcxr\" (UID: \"6cb9eef2-e10a-4a0c-bf29-8ade30f57048\") " pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.815927 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxg95\" (UniqueName: \"kubernetes.io/projected/c01aecec-3545-4b0d-a81f-0440b1cc2c19-kube-api-access-hxg95\") pod \"heat-operator-controller-manager-7bb9679997-2fcdq\" (UID: \"c01aecec-3545-4b0d-a81f-0440b1cc2c19\") " pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.815965 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwtsd\" (UniqueName: \"kubernetes.io/projected/198f1c1f-a452-4e1a-be6a-7bcfbe372441-kube-api-access-zwtsd\") pod \"horizon-operator-controller-manager-769bb6b489-8mwgc\" (UID: \"198f1c1f-a452-4e1a-be6a-7bcfbe372441\") " 
pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.816007 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqxq6\" (UniqueName: \"kubernetes.io/projected/78aeae10-6ff4-4ec2-9a6e-617b5b774122-kube-api-access-bqxq6\") pod \"ironic-operator-controller-manager-68ccf47b7f-dr6tt\" (UID: \"78aeae10-6ff4-4ec2-9a6e-617b5b774122\") " pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.816035 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w5vl\" (UniqueName: \"kubernetes.io/projected/c162813b-a3c1-4d12-a3ec-5ecb784c56da-kube-api-access-4w5vl\") pod \"manila-operator-controller-manager-8b756d9b7-t9stx\" (UID: \"c162813b-a3c1-4d12-a3ec-5ecb784c56da\") " pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.816077 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:31 crc kubenswrapper[4611]: E0929 12:55:31.816257 4611 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 12:55:31 crc kubenswrapper[4611]: E0929 12:55:31.816315 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert podName:c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6 nodeName:}" failed. No retries permitted until 2025-09-29 12:55:32.316294334 +0000 UTC m=+919.207813940 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert") pod "infra-operator-controller-manager-b758b5fbd-w8r7q" (UID: "c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6") : secret "infra-operator-webhook-server-cert" not found Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.824732 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-q8xl7" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.869469 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-mdxdg" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.876918 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q"] Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.881329 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxg95\" (UniqueName: \"kubernetes.io/projected/c01aecec-3545-4b0d-a81f-0440b1cc2c19-kube-api-access-hxg95\") pod \"heat-operator-controller-manager-7bb9679997-2fcdq\" (UID: \"c01aecec-3545-4b0d-a81f-0440b1cc2c19\") " pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.889894 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.913132 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9259\" (UniqueName: \"kubernetes.io/projected/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-kube-api-access-j9259\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.917825 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p2rg\" (UniqueName: \"kubernetes.io/projected/b63c6e5e-8aed-46b0-847a-d7a129e56281-kube-api-access-7p2rg\") pod \"nova-operator-controller-manager-cf9757659-9xvq5\" (UID: \"b63c6e5e-8aed-46b0-847a-d7a129e56281\") " pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.917908 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwwkq\" (UniqueName: \"kubernetes.io/projected/6cb9eef2-e10a-4a0c-bf29-8ade30f57048-kube-api-access-cwwkq\") pod \"keystone-operator-controller-manager-747665895-hdcxr\" (UID: \"6cb9eef2-e10a-4a0c-bf29-8ade30f57048\") " pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.917958 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nsxx\" (UniqueName: \"kubernetes.io/projected/043571f9-41a3-4573-a1a5-f50f80be69e9-kube-api-access-6nsxx\") pod \"mariadb-operator-controller-manager-5dfc69dd64-k6z9q\" (UID: \"043571f9-41a3-4573-a1a5-f50f80be69e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.917992 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmt25\" (UniqueName: \"kubernetes.io/projected/20d2ac56-4812-4211-82c2-787ece927b52-kube-api-access-hmt25\") pod \"neutron-operator-controller-manager-5869b4f857-md24f\" (UID: \"20d2ac56-4812-4211-82c2-787ece927b52\") " pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.918058 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w5vl\" (UniqueName: \"kubernetes.io/projected/c162813b-a3c1-4d12-a3ec-5ecb784c56da-kube-api-access-4w5vl\") pod \"manila-operator-controller-manager-8b756d9b7-t9stx\" (UID: \"c162813b-a3c1-4d12-a3ec-5ecb784c56da\") " pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.955780 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqxq6\" (UniqueName: \"kubernetes.io/projected/78aeae10-6ff4-4ec2-9a6e-617b5b774122-kube-api-access-bqxq6\") pod \"ironic-operator-controller-manager-68ccf47b7f-dr6tt\" (UID: \"78aeae10-6ff4-4ec2-9a6e-617b5b774122\") " pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.957855 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f"] Sep 29 
12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.958654 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwtsd\" (UniqueName: \"kubernetes.io/projected/198f1c1f-a452-4e1a-be6a-7bcfbe372441-kube-api-access-zwtsd\") pod \"horizon-operator-controller-manager-769bb6b489-8mwgc\" (UID: \"198f1c1f-a452-4e1a-be6a-7bcfbe372441\") " pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" Sep 29 12:55:31 crc kubenswrapper[4611]: I0929 12:55:31.968401 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.000341 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.005677 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.007089 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.015066 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-pf8s8" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.020781 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p2rg\" (UniqueName: \"kubernetes.io/projected/b63c6e5e-8aed-46b0-847a-d7a129e56281-kube-api-access-7p2rg\") pod \"nova-operator-controller-manager-cf9757659-9xvq5\" (UID: \"b63c6e5e-8aed-46b0-847a-d7a129e56281\") " pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.020859 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nsxx\" (UniqueName: \"kubernetes.io/projected/043571f9-41a3-4573-a1a5-f50f80be69e9-kube-api-access-6nsxx\") pod \"mariadb-operator-controller-manager-5dfc69dd64-k6z9q\" (UID: \"043571f9-41a3-4573-a1a5-f50f80be69e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.020913 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmt25\" (UniqueName: \"kubernetes.io/projected/20d2ac56-4812-4211-82c2-787ece927b52-kube-api-access-hmt25\") pod \"neutron-operator-controller-manager-5869b4f857-md24f\" (UID: \"20d2ac56-4812-4211-82c2-787ece927b52\") " pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.043139 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.045447 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.048320 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwwkq\" (UniqueName: \"kubernetes.io/projected/6cb9eef2-e10a-4a0c-bf29-8ade30f57048-kube-api-access-cwwkq\") pod \"keystone-operator-controller-manager-747665895-hdcxr\" (UID: \"6cb9eef2-e10a-4a0c-bf29-8ade30f57048\") " pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.058875 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.059511 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.060442 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-9t7hb" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.066685 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w5vl\" (UniqueName: \"kubernetes.io/projected/c162813b-a3c1-4d12-a3ec-5ecb784c56da-kube-api-access-4w5vl\") pod \"manila-operator-controller-manager-8b756d9b7-t9stx\" (UID: \"c162813b-a3c1-4d12-a3ec-5ecb784c56da\") " pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.067994 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmt25\" (UniqueName: \"kubernetes.io/projected/20d2ac56-4812-4211-82c2-787ece927b52-kube-api-access-hmt25\") pod \"neutron-operator-controller-manager-5869b4f857-md24f\" (UID: \"20d2ac56-4812-4211-82c2-787ece927b52\") " pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.072844 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nsxx\" (UniqueName: \"kubernetes.io/projected/043571f9-41a3-4573-a1a5-f50f80be69e9-kube-api-access-6nsxx\") pod \"mariadb-operator-controller-manager-5dfc69dd64-k6z9q\" (UID: \"043571f9-41a3-4573-a1a5-f50f80be69e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.080532 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p2rg\" (UniqueName: \"kubernetes.io/projected/b63c6e5e-8aed-46b0-847a-d7a129e56281-kube-api-access-7p2rg\") pod \"nova-operator-controller-manager-cf9757659-9xvq5\" (UID: \"b63c6e5e-8aed-46b0-847a-d7a129e56281\") " pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.088501 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.120112 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.121260 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.123929 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-tj5w7" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.126371 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smfcs\" (UniqueName: \"kubernetes.io/projected/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-kube-api-access-smfcs\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.126417 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.126499 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh2kd\" (UniqueName: \"kubernetes.io/projected/9774896f-6656-4059-84b3-1e40fe0b5a30-kube-api-access-sh2kd\") pod \"octavia-operator-controller-manager-577fccdf59-w6qg5\" (UID: \"9774896f-6656-4059-84b3-1e40fe0b5a30\") " pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.141214 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.155081 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.175678 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.181477 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.184086 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.192849 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-npr45" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.205405 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.220167 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.221700 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-857777455b-wqpzn"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.223411 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.232167 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.232521 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-pbzrv" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.233242 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smfcs\" (UniqueName: \"kubernetes.io/projected/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-kube-api-access-smfcs\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.233281 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.233332 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rkk9\" (UniqueName: \"kubernetes.io/projected/37ff3b92-ad5d-43ef-a942-b4dcd472c9c5-kube-api-access-5rkk9\") pod \"ovn-operator-controller-manager-645f75c974-l5dcb\" (UID: \"37ff3b92-ad5d-43ef-a942-b4dcd472c9c5\") " pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.233359 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh2kd\" (UniqueName: \"kubernetes.io/projected/9774896f-6656-4059-84b3-1e40fe0b5a30-kube-api-access-sh2kd\") pod \"octavia-operator-controller-manager-577fccdf59-w6qg5\" (UID: \"9774896f-6656-4059-84b3-1e40fe0b5a30\") " pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.233989 4611 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.234040 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert podName:eaa8a3d9-b8aa-4524-9e85-3e56463484f8 nodeName:}" failed. No retries permitted until 2025-09-29 12:55:32.734024026 +0000 UTC m=+919.625543632 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert") pod "openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" (UID: "eaa8a3d9-b8aa-4524-9e85-3e56463484f8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.252946 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.266682 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smfcs\" (UniqueName: \"kubernetes.io/projected/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-kube-api-access-smfcs\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.268740 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-857777455b-wqpzn"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.270371 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh2kd\" (UniqueName: \"kubernetes.io/projected/9774896f-6656-4059-84b3-1e40fe0b5a30-kube-api-access-sh2kd\") pod \"octavia-operator-controller-manager-577fccdf59-w6qg5\" (UID: \"9774896f-6656-4059-84b3-1e40fe0b5a30\") " pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.298332 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.299858 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.304171 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-ds92n" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.311596 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.317894 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.325313 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.328513 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-bdd59" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.328676 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.333273 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.338690 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.338743 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwkg5\" (UniqueName: \"kubernetes.io/projected/2348e002-3282-492c-a309-3e5b9eacfefd-kube-api-access-mwkg5\") pod \"swift-operator-controller-manager-857777455b-wqpzn\" (UID: \"2348e002-3282-492c-a309-3e5b9eacfefd\") " pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.338775 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rkk9\" (UniqueName: \"kubernetes.io/projected/37ff3b92-ad5d-43ef-a942-b4dcd472c9c5-kube-api-access-5rkk9\") pod \"ovn-operator-controller-manager-645f75c974-l5dcb\" (UID: \"37ff3b92-ad5d-43ef-a942-b4dcd472c9c5\") " pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.338880 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztl7d\" (UniqueName: \"kubernetes.io/projected/191225c7-d674-4c8d-9a15-7704f1dc80fb-kube-api-access-ztl7d\") pod \"placement-operator-controller-manager-5d8d5f5cf9-jvqfh\" (UID: \"191225c7-d674-4c8d-9a15-7704f1dc80fb\") " pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.339081 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.339835 4611 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.339880 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert podName:c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6 nodeName:}" failed. No retries permitted until 2025-09-29 12:55:33.339862727 +0000 UTC m=+920.231382333 (durationBeforeRetry 1s). 
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.340877 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf"]
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.423343 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rkk9\" (UniqueName: \"kubernetes.io/projected/37ff3b92-ad5d-43ef-a942-b4dcd472c9c5-kube-api-access-5rkk9\") pod \"ovn-operator-controller-manager-645f75c974-l5dcb\" (UID: \"37ff3b92-ad5d-43ef-a942-b4dcd472c9c5\") " pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.441903 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nvw9\" (UniqueName: \"kubernetes.io/projected/5feb7075-b56c-40a0-bab9-9205bcc973f0-kube-api-access-8nvw9\") pod \"telemetry-operator-controller-manager-6fdf4565bc-8d47j\" (UID: \"5feb7075-b56c-40a0-bab9-9205bcc973f0\") " pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.441955 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6phpd\" (UniqueName: \"kubernetes.io/projected/4365233e-5b3b-4d90-8497-32deefcdc842-kube-api-access-6phpd\") pod \"test-operator-controller-manager-5f4f4847c9-tq7mf\" (UID: \"4365233e-5b3b-4d90-8497-32deefcdc842\") " pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.441994 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztl7d\" (UniqueName: \"kubernetes.io/projected/191225c7-d674-4c8d-9a15-7704f1dc80fb-kube-api-access-ztl7d\") pod \"placement-operator-controller-manager-5d8d5f5cf9-jvqfh\" (UID: \"191225c7-d674-4c8d-9a15-7704f1dc80fb\") " pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.442063 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwkg5\" (UniqueName: \"kubernetes.io/projected/2348e002-3282-492c-a309-3e5b9eacfefd-kube-api-access-mwkg5\") pod \"swift-operator-controller-manager-857777455b-wqpzn\" (UID: \"2348e002-3282-492c-a309-3e5b9eacfefd\") " pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.466638 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.491405 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwkg5\" (UniqueName: \"kubernetes.io/projected/2348e002-3282-492c-a309-3e5b9eacfefd-kube-api-access-mwkg5\") pod \"swift-operator-controller-manager-857777455b-wqpzn\" (UID: \"2348e002-3282-492c-a309-3e5b9eacfefd\") " pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.498886 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztl7d\" (UniqueName: \"kubernetes.io/projected/191225c7-d674-4c8d-9a15-7704f1dc80fb-kube-api-access-ztl7d\") pod \"placement-operator-controller-manager-5d8d5f5cf9-jvqfh\" (UID: \"191225c7-d674-4c8d-9a15-7704f1dc80fb\") " pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.516508 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.517674 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.524537 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-gsrq8" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.524792 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.543296 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nvw9\" (UniqueName: \"kubernetes.io/projected/5feb7075-b56c-40a0-bab9-9205bcc973f0-kube-api-access-8nvw9\") pod \"telemetry-operator-controller-manager-6fdf4565bc-8d47j\" (UID: \"5feb7075-b56c-40a0-bab9-9205bcc973f0\") " pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.543360 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6phpd\" (UniqueName: \"kubernetes.io/projected/4365233e-5b3b-4d90-8497-32deefcdc842-kube-api-access-6phpd\") pod \"test-operator-controller-manager-5f4f4847c9-tq7mf\" (UID: \"4365233e-5b3b-4d90-8497-32deefcdc842\") " pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.547531 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.547922 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.592895 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6phpd\" (UniqueName: \"kubernetes.io/projected/4365233e-5b3b-4d90-8497-32deefcdc842-kube-api-access-6phpd\") pod \"test-operator-controller-manager-5f4f4847c9-tq7mf\" (UID: \"4365233e-5b3b-4d90-8497-32deefcdc842\") " pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.594222 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.601191 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nvw9\" (UniqueName: \"kubernetes.io/projected/5feb7075-b56c-40a0-bab9-9205bcc973f0-kube-api-access-8nvw9\") pod \"telemetry-operator-controller-manager-6fdf4565bc-8d47j\" (UID: \"5feb7075-b56c-40a0-bab9-9205bcc973f0\") " pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.638034 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.647578 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xngk7\" (UniqueName: \"kubernetes.io/projected/684aa388-0688-47d5-94fc-3dc35ee44c84-kube-api-access-xngk7\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.647653 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.659236 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.660317 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.698888 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-m2t27" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.700311 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.743094 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs"] Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.771560 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.771607 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt5vv\" (UniqueName: \"kubernetes.io/projected/4ec018f9-0388-4dac-af1d-75d43cfc0f89-kube-api-access-wt5vv\") pod \"rabbitmq-cluster-operator-manager-79d8469568-hkpgs\" (UID: \"4ec018f9-0388-4dac-af1d-75d43cfc0f89\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.771693 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xngk7\" (UniqueName: \"kubernetes.io/projected/684aa388-0688-47d5-94fc-3dc35ee44c84-kube-api-access-xngk7\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.771769 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.771973 4611 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.772031 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert podName:684aa388-0688-47d5-94fc-3dc35ee44c84 nodeName:}" failed. No retries permitted until 2025-09-29 12:55:33.272011795 +0000 UTC m=+920.163531401 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert") pod "openstack-operator-controller-manager-754df57b6f-4hjqs" (UID: "684aa388-0688-47d5-94fc-3dc35ee44c84") : secret "webhook-server-cert" not found Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.779318 4611 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 12:55:32 crc kubenswrapper[4611]: E0929 12:55:32.779414 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert podName:eaa8a3d9-b8aa-4524-9e85-3e56463484f8 nodeName:}" failed. No retries permitted until 2025-09-29 12:55:33.779393808 +0000 UTC m=+920.670913414 (durationBeforeRetry 1s). 
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.811557 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xngk7\" (UniqueName: \"kubernetes.io/projected/684aa388-0688-47d5-94fc-3dc35ee44c84-kube-api-access-xngk7\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.874295 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt5vv\" (UniqueName: \"kubernetes.io/projected/4ec018f9-0388-4dac-af1d-75d43cfc0f89-kube-api-access-wt5vv\") pod \"rabbitmq-cluster-operator-manager-79d8469568-hkpgs\" (UID: \"4ec018f9-0388-4dac-af1d-75d43cfc0f89\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.902744 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt5vv\" (UniqueName: \"kubernetes.io/projected/4ec018f9-0388-4dac-af1d-75d43cfc0f89-kube-api-access-wt5vv\") pod \"rabbitmq-cluster-operator-manager-79d8469568-hkpgs\" (UID: \"4ec018f9-0388-4dac-af1d-75d43cfc0f89\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs"
Sep 29 12:55:32 crc kubenswrapper[4611]: I0929 12:55:32.929388 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk"]
Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.078564 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv"]
Sep 29 12:55:33 crc kubenswrapper[4611]: W0929 12:55:33.101952 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f637c90_4822_4587_922d_3dbf2240977b.slice/crio-e64c04476b3fc1c7889856a48adc76fcbb71d4eecf824526957fb620a6c02ada WatchSource:0}: Error finding container e64c04476b3fc1c7889856a48adc76fcbb71d4eecf824526957fb620a6c02ada: Status 404 returned error can't find the container with id e64c04476b3fc1c7889856a48adc76fcbb71d4eecf824526957fb620a6c02ada
Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.104141 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq"]
Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.112250 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.154397 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm"]
Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.191416 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.288048 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:33 crc kubenswrapper[4611]: E0929 12:55:33.288263 4611 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 12:55:33 crc kubenswrapper[4611]: E0929 12:55:33.288340 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert podName:684aa388-0688-47d5-94fc-3dc35ee44c84 nodeName:}" failed. No retries permitted until 2025-09-29 12:55:34.288322678 +0000 UTC m=+921.179842284 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert") pod "openstack-operator-controller-manager-754df57b6f-4hjqs" (UID: "684aa388-0688-47d5-94fc-3dc35ee44c84") : secret "webhook-server-cert" not found Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.321064 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt"] Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.390227 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.394758 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6-cert\") pod \"infra-operator-controller-manager-b758b5fbd-w8r7q\" (UID: \"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6\") " pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.467578 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.488616 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx"] Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.494166 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q"] Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.499950 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f"] Sep 29 12:55:33 crc kubenswrapper[4611]: W0929 12:55:33.513226 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e145eda_2d1e_414d_b09c_b78dc328af46.slice/crio-ab9d7f746f18b22fe8786d4297c863e3899c668295a8f8acd8a04e4a41094e5f WatchSource:0}: Error finding container ab9d7f746f18b22fe8786d4297c863e3899c668295a8f8acd8a04e4a41094e5f: Status 404 returned error can't find the container with id ab9d7f746f18b22fe8786d4297c863e3899c668295a8f8acd8a04e4a41094e5f Sep 29 12:55:33 crc kubenswrapper[4611]: W0929 12:55:33.515175 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod043571f9_41a3_4573_a1a5_f50f80be69e9.slice/crio-af151b18d4a638d728b9731731fb66d46a1b1d0de3028210a6fcb96bc5607a02 WatchSource:0}: Error finding container af151b18d4a638d728b9731731fb66d46a1b1d0de3028210a6fcb96bc5607a02: Status 404 returned error can't find the container with id af151b18d4a638d728b9731731fb66d46a1b1d0de3028210a6fcb96bc5607a02 Sep 29 12:55:33 crc kubenswrapper[4611]: W0929 12:55:33.519698 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20d2ac56_4812_4211_82c2_787ece927b52.slice/crio-dab12a3e503e8d81ecf75aface596dfe3632fc45a6de347fe060e03be0cd8b5f WatchSource:0}: Error finding container dab12a3e503e8d81ecf75aface596dfe3632fc45a6de347fe060e03be0cd8b5f: Status 404 returned error can't find the container with id dab12a3e503e8d81ecf75aface596dfe3632fc45a6de347fe060e03be0cd8b5f Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.698498 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-747665895-hdcxr"] Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.706181 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5"] Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.711858 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx"] Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.797468 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.805033 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/eaa8a3d9-b8aa-4524-9e85-3e56463484f8-cert\") pod \"openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl\" (UID: \"eaa8a3d9-b8aa-4524-9e85-3e56463484f8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.895446 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.937426 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" event={"ID":"6cb9eef2-e10a-4a0c-bf29-8ade30f57048","Type":"ContainerStarted","Data":"3f59b5d0eb2d7b5c6d4dff4a908e39e67664ce472e8f8a953e0950aa5c238ca9"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.944997 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" event={"ID":"6e145eda-2d1e-414d-b09c-b78dc328af46","Type":"ContainerStarted","Data":"ab9d7f746f18b22fe8786d4297c863e3899c668295a8f8acd8a04e4a41094e5f"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.961511 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" event={"ID":"c01aecec-3545-4b0d-a81f-0440b1cc2c19","Type":"ContainerStarted","Data":"5d5705a27279a067663d17229df9db45182c5496cd8a1351db951a80bda4ddd0"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.967967 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" event={"ID":"78aeae10-6ff4-4ec2-9a6e-617b5b774122","Type":"ContainerStarted","Data":"f391723f036cdcf3bf319949b43b39ff553695c2ba6a4982e40eb7565184e5b0"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.971422 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" event={"ID":"09ccaa74-5871-4408-8476-54e35b95a774","Type":"ContainerStarted","Data":"2f99e922640756dd8a8e84cacd82badf3107ff802170564fabbaec231ecc9c23"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.976564 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" event={"ID":"9774896f-6656-4059-84b3-1e40fe0b5a30","Type":"ContainerStarted","Data":"66648e6afbed47695488e3e9e43d0a5ec6c0955c60ee92621adf0cce50aae5d0"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.988276 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" event={"ID":"043571f9-41a3-4573-a1a5-f50f80be69e9","Type":"ContainerStarted","Data":"af151b18d4a638d728b9731731fb66d46a1b1d0de3028210a6fcb96bc5607a02"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.994242 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" event={"ID":"5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b","Type":"ContainerStarted","Data":"0468fb51583b75fd1530dd911b7f30d296da3a532099dfa6e4cd66eca3eab18f"} Sep 29 12:55:33 crc kubenswrapper[4611]: I0929 12:55:33.998649 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" 
event={"ID":"c162813b-a3c1-4d12-a3ec-5ecb784c56da","Type":"ContainerStarted","Data":"b68196bb0904e759c8f0104b9556a8c3c4bf482a6bff9c9724549e168c4b3680"} Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.003668 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" event={"ID":"4f637c90-4822-4587-922d-3dbf2240977b","Type":"ContainerStarted","Data":"e64c04476b3fc1c7889856a48adc76fcbb71d4eecf824526957fb620a6c02ada"} Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.005452 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" event={"ID":"20d2ac56-4812-4211-82c2-787ece927b52","Type":"ContainerStarted","Data":"dab12a3e503e8d81ecf75aface596dfe3632fc45a6de347fe060e03be0cd8b5f"} Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.108760 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc"] Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.138707 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb"] Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.154410 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh"] Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.182615 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j"] Sep 29 12:55:34 crc kubenswrapper[4611]: W0929 12:55:34.193083 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4365233e_5b3b_4d90_8497_32deefcdc842.slice/crio-39e6cedb7ccea98554950aba865897998446a6f52ffe94bb8c57c749457aec61 WatchSource:0}: Error finding container 39e6cedb7ccea98554950aba865897998446a6f52ffe94bb8c57c749457aec61: Status 404 returned error can't find the container with id 39e6cedb7ccea98554950aba865897998446a6f52ffe94bb8c57c749457aec61 Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.193150 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-857777455b-wqpzn"] Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.252881 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:7c549f735f9064b3706e94b1486fa6a866ded8af7f6263c0408784595fd17f44,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mwkg5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-857777455b-wqpzn_openstack-operators(2348e002-3282-492c-a309-3e5b9eacfefd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.262195 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5"] Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.272172 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:e2dd700b6454f0f1a0a7aae913c5a09462e44c1d8967fddcce641d0d99b3d13d,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ztl7d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5d8d5f5cf9-jvqfh_openstack-operators(191225c7-d674-4c8d-9a15-7704f1dc80fb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.272345 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:882e4fa832e5af2c6d8c55b99948a44c7bcfc48d99a08b36fe0bb35fa3e86caf,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j9259,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
infra-operator-controller-manager-b758b5fbd-w8r7q_openstack-operators(c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.279869 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf"] Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.282310 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:141a82e4684502adce96515fb42bab774da715d0717db6a14f2c4d987d40ffbb,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8nvw9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-6fdf4565bc-8d47j_openstack-operators(5feb7075-b56c-40a0-bab9-9205bcc973f0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.284537 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:21212fd7121eeac952281b778b9eec7d909699146d40593120e3336e1e5907e0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zwtsd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-769bb6b489-8mwgc_openstack-operators(198f1c1f-a452-4e1a-be6a-7bcfbe372441): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.298281 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs"] Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.309187 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.323942 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/684aa388-0688-47d5-94fc-3dc35ee44c84-cert\") pod \"openstack-operator-controller-manager-754df57b6f-4hjqs\" (UID: \"684aa388-0688-47d5-94fc-3dc35ee44c84\") " pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.337136 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q"] Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.424757 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:34 crc kubenswrapper[4611]: I0929 12:55:34.517836 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl"] Sep 29 12:55:34 crc kubenswrapper[4611]: W0929 12:55:34.569380 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeaa8a3d9_b8aa_4524_9e85_3e56463484f8.slice/crio-97dcf9dd7d8d3d4821bc33a998d4c56d501000c58aebc8ff26943d8a5355d3a2 WatchSource:0}: Error finding container 97dcf9dd7d8d3d4821bc33a998d4c56d501000c58aebc8ff26943d8a5355d3a2: Status 404 returned error can't find the container with id 97dcf9dd7d8d3d4821bc33a998d4c56d501000c58aebc8ff26943d8a5355d3a2 Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.633474 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" podUID="2348e002-3282-492c-a309-3e5b9eacfefd" Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.691904 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" podUID="c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6" Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.804890 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" podUID="191225c7-d674-4c8d-9a15-7704f1dc80fb" Sep 29 12:55:34 crc kubenswrapper[4611]: E0929 12:55:34.817222 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" podUID="198f1c1f-a452-4e1a-be6a-7bcfbe372441" Sep 29 12:55:35 crc kubenswrapper[4611]: E0929 12:55:35.000926 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" podUID="5feb7075-b56c-40a0-bab9-9205bcc973f0" Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.045135 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs"] Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.101377 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" event={"ID":"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6","Type":"ContainerStarted","Data":"56be15575d1a6afed93dc4bad05186e48a5a19d14d18ec94985da33371ba8879"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.101513 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" event={"ID":"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6","Type":"ContainerStarted","Data":"8d4266498a6d66e0b11580188534999e9c415dc0202a25990bb8936a578ad824"} Sep 29 12:55:35 crc kubenswrapper[4611]: E0929 12:55:35.113936 4611 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:882e4fa832e5af2c6d8c55b99948a44c7bcfc48d99a08b36fe0bb35fa3e86caf\\\"\"" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" podUID="c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6" Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.192311 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" event={"ID":"37ff3b92-ad5d-43ef-a942-b4dcd472c9c5","Type":"ContainerStarted","Data":"1809b2c5a676dd7638d23f57636e1ec01009110e2d3984c32f663c3ff77a665c"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.230895 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" event={"ID":"4ec018f9-0388-4dac-af1d-75d43cfc0f89","Type":"ContainerStarted","Data":"7474f831e0f3db92896e78641d80b70aeb5e975c991ca011a0df3034198327c3"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.300723 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" event={"ID":"2348e002-3282-492c-a309-3e5b9eacfefd","Type":"ContainerStarted","Data":"8e5bbb079b399af22d792eb55d59f2b87198119c695d9fc67178acb642068437"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.300783 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" event={"ID":"2348e002-3282-492c-a309-3e5b9eacfefd","Type":"ContainerStarted","Data":"6565f467a98bf0998788cb2dfaa6961038ae9f3ea9a2a3e4bbe6b126f892e8bb"} Sep 29 12:55:35 crc kubenswrapper[4611]: E0929 12:55:35.304677 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:7c549f735f9064b3706e94b1486fa6a866ded8af7f6263c0408784595fd17f44\\\"\"" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" podUID="2348e002-3282-492c-a309-3e5b9eacfefd" Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.310016 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" event={"ID":"b63c6e5e-8aed-46b0-847a-d7a129e56281","Type":"ContainerStarted","Data":"2c6bdf2255fd7f73bd7d2e57ddb820f21f3c667bd153ff52f89e8a5fb38ae4f2"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.315776 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" event={"ID":"191225c7-d674-4c8d-9a15-7704f1dc80fb","Type":"ContainerStarted","Data":"b2448e54cd8f6c9eb74c46699fda6d0960284aab02eefe78bcddc98144188dca"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.315818 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" event={"ID":"191225c7-d674-4c8d-9a15-7704f1dc80fb","Type":"ContainerStarted","Data":"dabce45c5a6b92a72b2b0d575fafdfdc81e2091ae54c6bac061769ca10cd941d"} Sep 29 12:55:35 crc kubenswrapper[4611]: E0929 12:55:35.344859 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e2dd700b6454f0f1a0a7aae913c5a09462e44c1d8967fddcce641d0d99b3d13d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" podUID="191225c7-d674-4c8d-9a15-7704f1dc80fb" Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.346221 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" event={"ID":"5feb7075-b56c-40a0-bab9-9205bcc973f0","Type":"ContainerStarted","Data":"8e24c3931297de69b3a6c4015b4074bb4a2bf41478340435fa49c8efa2ebd7ac"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.346255 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" event={"ID":"5feb7075-b56c-40a0-bab9-9205bcc973f0","Type":"ContainerStarted","Data":"4105a0c335dc80df0ab6269de464c5bd5cb3712b39a8473b4f1e6170c4975aef"} Sep 29 12:55:35 crc kubenswrapper[4611]: E0929 12:55:35.375263 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:141a82e4684502adce96515fb42bab774da715d0717db6a14f2c4d987d40ffbb\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" podUID="5feb7075-b56c-40a0-bab9-9205bcc973f0" Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.398431 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" event={"ID":"eaa8a3d9-b8aa-4524-9e85-3e56463484f8","Type":"ContainerStarted","Data":"97dcf9dd7d8d3d4821bc33a998d4c56d501000c58aebc8ff26943d8a5355d3a2"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.455489 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" event={"ID":"4365233e-5b3b-4d90-8497-32deefcdc842","Type":"ContainerStarted","Data":"39e6cedb7ccea98554950aba865897998446a6f52ffe94bb8c57c749457aec61"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.468343 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" event={"ID":"198f1c1f-a452-4e1a-be6a-7bcfbe372441","Type":"ContainerStarted","Data":"6ba0740679397830d252578deb0f0ad66f38849fa72395ab376324b0678758c6"} Sep 29 12:55:35 crc kubenswrapper[4611]: I0929 12:55:35.468407 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" event={"ID":"198f1c1f-a452-4e1a-be6a-7bcfbe372441","Type":"ContainerStarted","Data":"4c803cc5e13c27a698b2faf1034f4e4136b1c353624f80b64823a2e2aa6e61ba"} Sep 29 12:55:35 crc kubenswrapper[4611]: E0929 12:55:35.493700 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:21212fd7121eeac952281b778b9eec7d909699146d40593120e3336e1e5907e0\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" podUID="198f1c1f-a452-4e1a-be6a-7bcfbe372441" Sep 29 12:55:36 crc kubenswrapper[4611]: I0929 12:55:36.560059 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" 
event={"ID":"684aa388-0688-47d5-94fc-3dc35ee44c84","Type":"ContainerStarted","Data":"df9000e8d2577b5c6cdd15ae3323c05beedb53aa4ff6178cb56e3ab1301ccee0"} Sep 29 12:55:36 crc kubenswrapper[4611]: I0929 12:55:36.560410 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" event={"ID":"684aa388-0688-47d5-94fc-3dc35ee44c84","Type":"ContainerStarted","Data":"d1b3cf1496b345f03ff3664cbcb317545bd1d5c49a730a57ac297af08732f914"} Sep 29 12:55:36 crc kubenswrapper[4611]: I0929 12:55:36.560436 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:36 crc kubenswrapper[4611]: I0929 12:55:36.560447 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" event={"ID":"684aa388-0688-47d5-94fc-3dc35ee44c84","Type":"ContainerStarted","Data":"af5b41f3f87b9c5fa72ac54902b0e7a1779c435b2fcf7b668aafccb14794b1d5"} Sep 29 12:55:36 crc kubenswrapper[4611]: E0929 12:55:36.560492 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:882e4fa832e5af2c6d8c55b99948a44c7bcfc48d99a08b36fe0bb35fa3e86caf\\\"\"" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" podUID="c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6" Sep 29 12:55:36 crc kubenswrapper[4611]: E0929 12:55:36.567127 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:21212fd7121eeac952281b778b9eec7d909699146d40593120e3336e1e5907e0\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" podUID="198f1c1f-a452-4e1a-be6a-7bcfbe372441" Sep 29 12:55:36 crc kubenswrapper[4611]: E0929 12:55:36.567217 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:7c549f735f9064b3706e94b1486fa6a866ded8af7f6263c0408784595fd17f44\\\"\"" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" podUID="2348e002-3282-492c-a309-3e5b9eacfefd" Sep 29 12:55:36 crc kubenswrapper[4611]: E0929 12:55:36.567271 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:141a82e4684502adce96515fb42bab774da715d0717db6a14f2c4d987d40ffbb\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" podUID="5feb7075-b56c-40a0-bab9-9205bcc973f0" Sep 29 12:55:36 crc kubenswrapper[4611]: E0929 12:55:36.567296 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e2dd700b6454f0f1a0a7aae913c5a09462e44c1d8967fddcce641d0d99b3d13d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" podUID="191225c7-d674-4c8d-9a15-7704f1dc80fb" Sep 29 12:55:36 crc kubenswrapper[4611]: I0929 12:55:36.751741 4611 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" podStartSLOduration=4.751725609 podStartE2EDuration="4.751725609s" podCreationTimestamp="2025-09-29 12:55:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:55:36.746302353 +0000 UTC m=+923.637821969" watchObservedRunningTime="2025-09-29 12:55:36.751725609 +0000 UTC m=+923.643245205" Sep 29 12:55:44 crc kubenswrapper[4611]: I0929 12:55:44.430334 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-754df57b6f-4hjqs" Sep 29 12:55:52 crc kubenswrapper[4611]: E0929 12:55:52.060703 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:24ce2afd2fe5e4e059512e3f9bf364b370fe6301610db19145d2b61c485fbd3a" Sep 29 12:55:52 crc kubenswrapper[4611]: E0929 12:55:52.061559 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:24ce2afd2fe5e4e059512e3f9bf364b370fe6301610db19145d2b61c485fbd3a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7mtxx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-7c68997f6b-thmfm_openstack-operators(5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Sep 29 12:55:53 crc kubenswrapper[4611]: E0929 12:55:53.337018 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:759b6819170324524e39ee25d0fe171e6a9a638e62904944a596cfab42481ef7" Sep 29 12:55:53 crc kubenswrapper[4611]: E0929 12:55:53.337589 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:759b6819170324524e39ee25d0fe171e6a9a638e62904944a596cfab42481ef7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6phpd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5f4f4847c9-tq7mf_openstack-operators(4365233e-5b3b-4d90-8497-32deefcdc842): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:53 crc kubenswrapper[4611]: E0929 12:55:53.729738 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:fb5f2fb9bf2089a6b616be81954d0e6130f91d949c8cfda816c926cc48fd903c" Sep 29 12:55:53 crc kubenswrapper[4611]: E0929 12:55:53.729914 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:fb5f2fb9bf2089a6b616be81954d0e6130f91d949c8cfda816c926cc48fd903c,Command:[/manager],Args:[--health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n5bgz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7fdd8988b7-527vk_openstack-operators(4f637c90-4822-4587-922d-3dbf2240977b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:54 crc kubenswrapper[4611]: E0929 12:55:54.128131 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:d0af12270460533c528c8ef9de21b9a191648efa8787604862646a33f4e950ee" Sep 29 12:55:54 crc kubenswrapper[4611]: E0929 12:55:54.128607 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:d0af12270460533c528c8ef9de21b9a191648efa8787604862646a33f4e950ee,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5rkk9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-645f75c974-l5dcb_openstack-operators(37ff3b92-ad5d-43ef-a942-b4dcd472c9c5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:54 crc kubenswrapper[4611]: E0929 12:55:54.517487 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:dcd2893c765f69d05481eb20f3a0244c8abc9fa67f91bdd81532555cedd3acd2" Sep 29 12:55:54 crc kubenswrapper[4611]: E0929 12:55:54.518038 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:dcd2893c765f69d05481eb20f3a0244c8abc9fa67f91bdd81532555cedd3acd2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bqxq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-68ccf47b7f-dr6tt_openstack-operators(78aeae10-6ff4-4ec2-9a6e-617b5b774122): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:54 crc kubenswrapper[4611]: E0929 12:55:54.919558 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:da324acd36c9c781d8769b413da12c4d281c3e400b4165e0e5c8a75a8cc5edc5" Sep 29 12:55:54 crc kubenswrapper[4611]: E0929 12:55:54.919753 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:da324acd36c9c781d8769b413da12c4d281c3e400b4165e0e5c8a75a8cc5edc5,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7p2rg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-cf9757659-9xvq5_openstack-operators(b63c6e5e-8aed-46b0-847a-d7a129e56281): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:55 crc kubenswrapper[4611]: E0929 12:55:55.276054 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a857f1d48b6bb38e77997baf7e7b7a9930aacab912b42ac10d1023d7e4e1dad0" Sep 29 12:55:55 crc kubenswrapper[4611]: E0929 12:55:55.276571 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a857f1d48b6bb38e77997baf7e7b7a9930aacab912b42ac10d1023d7e4e1dad0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMET
ER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RE
LATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCH
EDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_A
PI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-smfcs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl_openstack-operators(eaa8a3d9-b8aa-4524-9e85-3e56463484f8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:55 crc kubenswrapper[4611]: E0929 12:55:55.755906 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b" Sep 29 12:55:55 crc kubenswrapper[4611]: E0929 12:55:55.756446 4611 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wt5vv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-hkpgs_openstack-operators(4ec018f9-0388-4dac-af1d-75d43cfc0f89): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:55:55 crc kubenswrapper[4611]: E0929 12:55:55.757612 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" podUID="4ec018f9-0388-4dac-af1d-75d43cfc0f89" Sep 29 12:55:55 crc kubenswrapper[4611]: E0929 12:55:55.801972 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" podUID="4ec018f9-0388-4dac-af1d-75d43cfc0f89" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.425470 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" podUID="eaa8a3d9-b8aa-4524-9e85-3e56463484f8" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.464364 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" podUID="37ff3b92-ad5d-43ef-a942-b4dcd472c9c5" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.565051 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" podUID="4f637c90-4822-4587-922d-3dbf2240977b" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.676064 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" podUID="78aeae10-6ff4-4ec2-9a6e-617b5b774122" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.712435 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" podUID="4365233e-5b3b-4d90-8497-32deefcdc842" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.713048 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" podUID="b63c6e5e-8aed-46b0-847a-d7a129e56281" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.832353 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" event={"ID":"043571f9-41a3-4573-a1a5-f50f80be69e9","Type":"ContainerStarted","Data":"4bdda3521fd5f5ed68864b656b751693b99ca78fd19510146ae019e79486cfc1"} Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.851124 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" event={"ID":"c01aecec-3545-4b0d-a81f-0440b1cc2c19","Type":"ContainerStarted","Data":"c4337cfe03f9e908e218c3c3c8b05bf6428f937ec95884d2bc6868b7cf308fda"} Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.860170 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" event={"ID":"5feb7075-b56c-40a0-bab9-9205bcc973f0","Type":"ContainerStarted","Data":"42c7172f52d1449882cf521a23e8f24bc5e31f79ac86e052eceeb8ce938bb9d1"} Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.860403 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.863003 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" event={"ID":"6e145eda-2d1e-414d-b09c-b78dc328af46","Type":"ContainerStarted","Data":"f12aa7cfd1427abfb807995dd10776421181f4ecd984dac0933b3116f4f46f7b"} Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.867856 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" podUID="5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.886488 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" event={"ID":"4f637c90-4822-4587-922d-3dbf2240977b","Type":"ContainerStarted","Data":"14861e983e86975007a172bce094be4d8afdcae688c0782bc2623b73fc4a03ef"} Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.888503 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:fb5f2fb9bf2089a6b616be81954d0e6130f91d949c8cfda816c926cc48fd903c\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" podUID="4f637c90-4822-4587-922d-3dbf2240977b" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.900244 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" event={"ID":"191225c7-d674-4c8d-9a15-7704f1dc80fb","Type":"ContainerStarted","Data":"d74ce178be7ee5eb78a48565884770d0a0ede27c53641bc64208206e9ff0864a"} Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.900476 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.908983 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" event={"ID":"78aeae10-6ff4-4ec2-9a6e-617b5b774122","Type":"ContainerStarted","Data":"c9a27b6584eefe4f2ee46627706fbb30d48057dddb2461dae9ad8a04e6342586"} Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.909301 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" podStartSLOduration=4.108596891 podStartE2EDuration="27.909292166s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.2815555 +0000 UTC m=+921.173075106" lastFinishedPulling="2025-09-29 12:55:58.082250775 +0000 UTC m=+944.973770381" observedRunningTime="2025-09-29 12:55:58.903666144 +0000 UTC m=+945.795185750" watchObservedRunningTime="2025-09-29 12:55:58.909292166 +0000 UTC m=+945.800811772" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.911798 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:dcd2893c765f69d05481eb20f3a0244c8abc9fa67f91bdd81532555cedd3acd2\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" podUID="78aeae10-6ff4-4ec2-9a6e-617b5b774122" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.929282 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" event={"ID":"4365233e-5b3b-4d90-8497-32deefcdc842","Type":"ContainerStarted","Data":"a2bb3214b82ff221d8e3c1eaf0fee7793ea24543f86e57dc097ee28576d1dc03"} Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.931017 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:759b6819170324524e39ee25d0fe171e6a9a638e62904944a596cfab42481ef7\\\"\"" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" podUID="4365233e-5b3b-4d90-8497-32deefcdc842" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.936433 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" event={"ID":"37ff3b92-ad5d-43ef-a942-b4dcd472c9c5","Type":"ContainerStarted","Data":"08e53bd0c1d49d870986d8cac891ac44cc93aed6fd5e0a741f7e1be35bb42335"} Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.955270 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" event={"ID":"eaa8a3d9-b8aa-4524-9e85-3e56463484f8","Type":"ContainerStarted","Data":"62c1fc0b005fe3f91f55d37edea235a4dd373c7c75539d744573ab99e7ac43a5"} Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.963512 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:d0af12270460533c528c8ef9de21b9a191648efa8787604862646a33f4e950ee\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" podUID="37ff3b92-ad5d-43ef-a942-b4dcd472c9c5" Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.965749 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a857f1d48b6bb38e77997baf7e7b7a9930aacab912b42ac10d1023d7e4e1dad0\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" podUID="eaa8a3d9-b8aa-4524-9e85-3e56463484f8" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.970115 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" event={"ID":"b63c6e5e-8aed-46b0-847a-d7a129e56281","Type":"ContainerStarted","Data":"cd771fb824f545720c5f356fa4e5161adda3228049739a4d9a7e8cf02e202e58"} Sep 29 12:55:58 crc kubenswrapper[4611]: E0929 12:55:58.971783 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:da324acd36c9c781d8769b413da12c4d281c3e400b4165e0e5c8a75a8cc5edc5\\\"\"" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" podUID="b63c6e5e-8aed-46b0-847a-d7a129e56281" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.972431 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" podStartSLOduration=4.122953964 podStartE2EDuration="27.972421156s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.271919862 +0000 UTC m=+921.163439468" lastFinishedPulling="2025-09-29 12:55:58.121387054 +0000 UTC m=+945.012906660" observedRunningTime="2025-09-29 12:55:58.970507361 +0000 UTC m=+945.862026967" watchObservedRunningTime="2025-09-29 12:55:58.972421156 +0000 UTC m=+945.863940762" Sep 29 12:55:58 crc kubenswrapper[4611]: I0929 12:55:58.988929 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" event={"ID":"c162813b-a3c1-4d12-a3ec-5ecb784c56da","Type":"ContainerStarted","Data":"f7e2d60cc3e42fd280266c305bb8a9387fcdcdab11316ac9d8498dc841d13e88"} Sep 29 12:55:59 crc kubenswrapper[4611]: I0929 12:55:59.019923 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" event={"ID":"6cb9eef2-e10a-4a0c-bf29-8ade30f57048","Type":"ContainerStarted","Data":"725e97dcf068032cdeeb0c1e51d37ff2edc1d8a3a98a67fdce34ee269310e318"} Sep 29 12:55:59 crc kubenswrapper[4611]: I0929 12:55:59.022760 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" event={"ID":"09ccaa74-5871-4408-8476-54e35b95a774","Type":"ContainerStarted","Data":"d2e858925a5ea7baef841c7ff66f6aa59a92aeb7401b5f45d03d4ffa2ac0676b"} Sep 29 12:55:59 crc kubenswrapper[4611]: I0929 12:55:59.032529 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" event={"ID":"2348e002-3282-492c-a309-3e5b9eacfefd","Type":"ContainerStarted","Data":"516d3e2eedd43bbf6b75b6016c82e7a9a67958d04357a282f3baadbc33bb3a6a"} Sep 29 12:55:59 crc kubenswrapper[4611]: I0929 12:55:59.033636 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.043373 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" event={"ID":"09ccaa74-5871-4408-8476-54e35b95a774","Type":"ContainerStarted","Data":"bfa61ac8bdeba732a07cf65a1a94548a5dd2d75ce8ccca534071c1690896160f"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.043479 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.046340 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" event={"ID":"6cb9eef2-e10a-4a0c-bf29-8ade30f57048","Type":"ContainerStarted","Data":"ff74dd5c4bd1568151757650811788d2be680bbcbd45e85f09889241f54d7de5"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.046497 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.050416 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" event={"ID":"20d2ac56-4812-4211-82c2-787ece927b52","Type":"ContainerStarted","Data":"7eebd86b04e85b9b21812df426bc28fe49750bf348e791a90a116c7c87c33146"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.058481 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" event={"ID":"043571f9-41a3-4573-a1a5-f50f80be69e9","Type":"ContainerStarted","Data":"011fd68d06e8c4335a83a942b7b6c6a4676cb858ae8e1d8bcb40b87894dbf29e"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.059169 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:56:00 crc 
kubenswrapper[4611]: I0929 12:56:00.063220 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" event={"ID":"c01aecec-3545-4b0d-a81f-0440b1cc2c19","Type":"ContainerStarted","Data":"b12fd7aff43e189ee60f83b104629b65f553fd53791a9f8ca6b5d20817b729c5"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.063756 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.068814 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" event={"ID":"5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b","Type":"ContainerStarted","Data":"e877efd201ebb9b1e830e40abb97eeed0a2ade35d42a2285feca3a17300f40d1"} Sep 29 12:56:00 crc kubenswrapper[4611]: E0929 12:56:00.071187 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:24ce2afd2fe5e4e059512e3f9bf364b370fe6301610db19145d2b61c485fbd3a\\\"\"" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" podUID="5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.075336 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" event={"ID":"c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6","Type":"ContainerStarted","Data":"894125b7205c58c0b6e7827f80942444ee3484f313ddb6c53fe237ebfd33f519"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.075684 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.090814 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" event={"ID":"198f1c1f-a452-4e1a-be6a-7bcfbe372441","Type":"ContainerStarted","Data":"91d13ccd91b0b53f24b00542cf68b15825dbe7cf8d1bf87898aa36fcf589bf5c"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.091134 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.092820 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" podStartSLOduration=5.22380179 podStartE2EDuration="29.092806204s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.252328158 +0000 UTC m=+921.143847764" lastFinishedPulling="2025-09-29 12:55:58.121332572 +0000 UTC m=+945.012852178" observedRunningTime="2025-09-29 12:55:59.753734129 +0000 UTC m=+946.645253735" watchObservedRunningTime="2025-09-29 12:56:00.092806204 +0000 UTC m=+946.984325810" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.093221 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" podStartSLOduration=6.554051627 podStartE2EDuration="29.093216485s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.132061634 +0000 UTC m=+920.023581240" 
lastFinishedPulling="2025-09-29 12:55:55.671226492 +0000 UTC m=+942.562746098" observedRunningTime="2025-09-29 12:56:00.090092805 +0000 UTC m=+946.981612431" watchObservedRunningTime="2025-09-29 12:56:00.093216485 +0000 UTC m=+946.984736091" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.120101 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" event={"ID":"9774896f-6656-4059-84b3-1e40fe0b5a30","Type":"ContainerStarted","Data":"b7a361d79003270ab57b457c08ec613d80f4482b00387517b5a981903f70525b"} Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.120140 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.120152 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" event={"ID":"9774896f-6656-4059-84b3-1e40fe0b5a30","Type":"ContainerStarted","Data":"6dc48dbfcc8018944f628579495b7e34adc6da8883240e949f352fa3ab9b5c0c"} Sep 29 12:56:00 crc kubenswrapper[4611]: E0929 12:56:00.129083 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:fb5f2fb9bf2089a6b616be81954d0e6130f91d949c8cfda816c926cc48fd903c\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" podUID="4f637c90-4822-4587-922d-3dbf2240977b" Sep 29 12:56:00 crc kubenswrapper[4611]: E0929 12:56:00.129296 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:d0af12270460533c528c8ef9de21b9a191648efa8787604862646a33f4e950ee\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" podUID="37ff3b92-ad5d-43ef-a942-b4dcd472c9c5" Sep 29 12:56:00 crc kubenswrapper[4611]: E0929 12:56:00.129405 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:da324acd36c9c781d8769b413da12c4d281c3e400b4165e0e5c8a75a8cc5edc5\\\"\"" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" podUID="b63c6e5e-8aed-46b0-847a-d7a129e56281" Sep 29 12:56:00 crc kubenswrapper[4611]: E0929 12:56:00.129389 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:dcd2893c765f69d05481eb20f3a0244c8abc9fa67f91bdd81532555cedd3acd2\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" podUID="78aeae10-6ff4-4ec2-9a6e-617b5b774122" Sep 29 12:56:00 crc kubenswrapper[4611]: E0929 12:56:00.129451 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a857f1d48b6bb38e77997baf7e7b7a9930aacab912b42ac10d1023d7e4e1dad0\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" podUID="eaa8a3d9-b8aa-4524-9e85-3e56463484f8" Sep 29 12:56:00 crc 
kubenswrapper[4611]: E0929 12:56:00.129537 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:759b6819170324524e39ee25d0fe171e6a9a638e62904944a596cfab42481ef7\\\"\"" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" podUID="4365233e-5b3b-4d90-8497-32deefcdc842" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.141278 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" podStartSLOduration=5.331282977 podStartE2EDuration="29.14125727s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.272284853 +0000 UTC m=+921.163804459" lastFinishedPulling="2025-09-29 12:55:58.082259156 +0000 UTC m=+944.973778752" observedRunningTime="2025-09-29 12:56:00.131478438 +0000 UTC m=+947.022998054" watchObservedRunningTime="2025-09-29 12:56:00.14125727 +0000 UTC m=+947.032776876" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.264259 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" podStartSLOduration=7.314287623 podStartE2EDuration="29.264241176s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.720524357 +0000 UTC m=+920.612043963" lastFinishedPulling="2025-09-29 12:55:55.67047791 +0000 UTC m=+942.561997516" observedRunningTime="2025-09-29 12:56:00.259307873 +0000 UTC m=+947.150827479" watchObservedRunningTime="2025-09-29 12:56:00.264241176 +0000 UTC m=+947.155760782" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.327351 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" podStartSLOduration=4.463798081 podStartE2EDuration="29.327330604s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.132290401 +0000 UTC m=+920.023810007" lastFinishedPulling="2025-09-29 12:55:57.995822924 +0000 UTC m=+944.887342530" observedRunningTime="2025-09-29 12:56:00.32268608 +0000 UTC m=+947.214205686" watchObservedRunningTime="2025-09-29 12:56:00.327330604 +0000 UTC m=+947.218850210" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.376760 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" podStartSLOduration=7.223989029 podStartE2EDuration="29.376743059s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.517595667 +0000 UTC m=+920.409115273" lastFinishedPulling="2025-09-29 12:55:55.670349697 +0000 UTC m=+942.561869303" observedRunningTime="2025-09-29 12:56:00.371305932 +0000 UTC m=+947.262825538" watchObservedRunningTime="2025-09-29 12:56:00.376743059 +0000 UTC m=+947.268262665" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.503443 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" podStartSLOduration=5.737015325 podStartE2EDuration="29.503426971s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.716133711 +0000 UTC m=+920.607653317" lastFinishedPulling="2025-09-29 12:55:57.482545357 +0000 UTC 
m=+944.374064963" observedRunningTime="2025-09-29 12:56:00.500676551 +0000 UTC m=+947.392196157" watchObservedRunningTime="2025-09-29 12:56:00.503426971 +0000 UTC m=+947.394946577" Sep 29 12:56:00 crc kubenswrapper[4611]: I0929 12:56:00.613615 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" podStartSLOduration=5.763645432 podStartE2EDuration="29.613597137s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.281693794 +0000 UTC m=+921.173213400" lastFinishedPulling="2025-09-29 12:55:58.131645499 +0000 UTC m=+945.023165105" observedRunningTime="2025-09-29 12:56:00.589221574 +0000 UTC m=+947.480741180" watchObservedRunningTime="2025-09-29 12:56:00.613597137 +0000 UTC m=+947.505116743" Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.141493 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" event={"ID":"c162813b-a3c1-4d12-a3ec-5ecb784c56da","Type":"ContainerStarted","Data":"1c06b0be55db996262f578347f697dd177960e87e072e2d5b595ebd40aaa18df"} Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.141858 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.143300 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" event={"ID":"6e145eda-2d1e-414d-b09c-b78dc328af46","Type":"ContainerStarted","Data":"890773278b519f6b2b3d159c96e75ee662fdd828a35c05ae5f023d9c30a37fe3"} Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.143367 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.145205 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" event={"ID":"20d2ac56-4812-4211-82c2-787ece927b52","Type":"ContainerStarted","Data":"270edbed5d529bf45ce63d7c371b832adc8f29e6c46685fbb3b550f2d59565dc"} Sep 29 12:56:01 crc kubenswrapper[4611]: E0929 12:56:01.147198 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:24ce2afd2fe5e4e059512e3f9bf364b370fe6301610db19145d2b61c485fbd3a\\\"\"" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" podUID="5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b" Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.160915 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" podStartSLOduration=7.574140413 podStartE2EDuration="30.160897414s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.715736649 +0000 UTC m=+920.607256245" lastFinishedPulling="2025-09-29 12:55:56.30249364 +0000 UTC m=+943.194013246" observedRunningTime="2025-09-29 12:56:01.160348958 +0000 UTC m=+948.051868564" watchObservedRunningTime="2025-09-29 12:56:01.160897414 +0000 UTC m=+948.052417020" Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.202552 4611 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" podStartSLOduration=6.596009026 podStartE2EDuration="30.202529664s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.524039323 +0000 UTC m=+920.415558929" lastFinishedPulling="2025-09-29 12:55:57.130559961 +0000 UTC m=+944.022079567" observedRunningTime="2025-09-29 12:56:01.197651543 +0000 UTC m=+948.089171179" watchObservedRunningTime="2025-09-29 12:56:01.202529664 +0000 UTC m=+948.094049280" Sep 29 12:56:01 crc kubenswrapper[4611]: I0929 12:56:01.216333 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" podStartSLOduration=5.69842041 podStartE2EDuration="30.216317781s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.515831396 +0000 UTC m=+920.407351002" lastFinishedPulling="2025-09-29 12:55:58.033728767 +0000 UTC m=+944.925248373" observedRunningTime="2025-09-29 12:56:01.214450057 +0000 UTC m=+948.105969673" watchObservedRunningTime="2025-09-29 12:56:01.216317781 +0000 UTC m=+948.107837387" Sep 29 12:56:02 crc kubenswrapper[4611]: I0929 12:56:02.152799 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:56:03 crc kubenswrapper[4611]: I0929 12:56:03.160500 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5869b4f857-md24f" Sep 29 12:56:04 crc kubenswrapper[4611]: I0929 12:56:04.629501 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:56:04 crc kubenswrapper[4611]: I0929 12:56:04.629594 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:56:11 crc kubenswrapper[4611]: I0929 12:56:11.810298 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-54767c9785-ph2fv" Sep 29 12:56:11 crc kubenswrapper[4611]: I0929 12:56:11.906958 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-7bb9679997-2fcdq" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.064357 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-5d9d689896-m94tx" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.180469 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5dfc69dd64-k6z9q" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.227210 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" 
event={"ID":"4ec018f9-0388-4dac-af1d-75d43cfc0f89","Type":"ContainerStarted","Data":"bc7f892f4a7abbe795348a3ebd4407b5777464e962983d046764a7e130543ab6"} Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.234740 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" event={"ID":"78aeae10-6ff4-4ec2-9a6e-617b5b774122","Type":"ContainerStarted","Data":"f2fd06aa3ef67cff6da68ed8b48f9ee977ba87ea724382618c2e74c01fb66763"} Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.235551 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.261944 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-769bb6b489-8mwgc" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.264797 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-hkpgs" podStartSLOduration=2.615295521 podStartE2EDuration="40.264782155s" podCreationTimestamp="2025-09-29 12:55:32 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.230421406 +0000 UTC m=+921.121941012" lastFinishedPulling="2025-09-29 12:56:11.87990804 +0000 UTC m=+958.771427646" observedRunningTime="2025-09-29 12:56:12.244011957 +0000 UTC m=+959.135531563" watchObservedRunningTime="2025-09-29 12:56:12.264782155 +0000 UTC m=+959.156301761" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.298662 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" podStartSLOduration=2.755483325 podStartE2EDuration="41.298636841s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.338900916 +0000 UTC m=+920.230420522" lastFinishedPulling="2025-09-29 12:56:11.882054442 +0000 UTC m=+958.773574038" observedRunningTime="2025-09-29 12:56:12.294751599 +0000 UTC m=+959.186271215" watchObservedRunningTime="2025-09-29 12:56:12.298636841 +0000 UTC m=+959.190156467" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.315079 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-747665895-hdcxr" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.336954 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-8b756d9b7-t9stx" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.377269 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-577fccdf59-w6qg5" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.558407 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5d8d5f5cf9-jvqfh" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.603887 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-857777455b-wqpzn" Sep 29 12:56:12 crc kubenswrapper[4611]: I0929 12:56:12.648216 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-6fdf4565bc-8d47j" Sep 29 12:56:13 crc 
kubenswrapper[4611]: I0929 12:56:13.243384 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" event={"ID":"37ff3b92-ad5d-43ef-a942-b4dcd472c9c5","Type":"ContainerStarted","Data":"608c4cd1faf0c6e00e227046b977b9148a65afff47bedaee78196b8ea0a9a528"} Sep 29 12:56:13 crc kubenswrapper[4611]: I0929 12:56:13.243721 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" Sep 29 12:56:13 crc kubenswrapper[4611]: I0929 12:56:13.245446 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" event={"ID":"4365233e-5b3b-4d90-8497-32deefcdc842","Type":"ContainerStarted","Data":"d74927c6a26a47f54e828877ed079f322b09daf0b9a7f5595bd878c960842259"} Sep 29 12:56:13 crc kubenswrapper[4611]: I0929 12:56:13.245765 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" Sep 29 12:56:13 crc kubenswrapper[4611]: I0929 12:56:13.264357 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" podStartSLOduration=4.061697809 podStartE2EDuration="42.264338469s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.176120231 +0000 UTC m=+921.067639837" lastFinishedPulling="2025-09-29 12:56:12.378760891 +0000 UTC m=+959.270280497" observedRunningTime="2025-09-29 12:56:13.261979071 +0000 UTC m=+960.153498677" watchObservedRunningTime="2025-09-29 12:56:13.264338469 +0000 UTC m=+960.155858075" Sep 29 12:56:13 crc kubenswrapper[4611]: I0929 12:56:13.473745 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-b758b5fbd-w8r7q" Sep 29 12:56:13 crc kubenswrapper[4611]: I0929 12:56:13.491848 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" podStartSLOduration=3.3446584059999998 podStartE2EDuration="41.491823237s" podCreationTimestamp="2025-09-29 12:55:32 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.230806447 +0000 UTC m=+921.122326053" lastFinishedPulling="2025-09-29 12:56:12.377971278 +0000 UTC m=+959.269490884" observedRunningTime="2025-09-29 12:56:13.283080439 +0000 UTC m=+960.174600045" watchObservedRunningTime="2025-09-29 12:56:13.491823237 +0000 UTC m=+960.383342843" Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.269405 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" event={"ID":"b63c6e5e-8aed-46b0-847a-d7a129e56281","Type":"ContainerStarted","Data":"768aef52bcd715705bbd6a009145fd93a34d3cb381ae327996354e2a20a52023"} Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.270914 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.278339 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" event={"ID":"eaa8a3d9-b8aa-4524-9e85-3e56463484f8","Type":"ContainerStarted","Data":"ada9783b779ada95514aca3a0b002c9dadde035912986ed5d9b5222c72b704c9"} Sep 29 12:56:15 crc 
kubenswrapper[4611]: I0929 12:56:15.278661 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.280637 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" event={"ID":"4f637c90-4822-4587-922d-3dbf2240977b","Type":"ContainerStarted","Data":"8aec83caf39a597f2809d64a0b6fd8e3eace5cf569be7784e3eb7fd8a211e65e"} Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.280846 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.300032 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" podStartSLOduration=4.361413409 podStartE2EDuration="44.300008242s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.229975043 +0000 UTC m=+921.121494649" lastFinishedPulling="2025-09-29 12:56:14.168569876 +0000 UTC m=+961.060089482" observedRunningTime="2025-09-29 12:56:15.28883308 +0000 UTC m=+962.180352686" watchObservedRunningTime="2025-09-29 12:56:15.300008242 +0000 UTC m=+962.191527858" Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.320975 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" podStartSLOduration=4.753965496 podStartE2EDuration="44.320953926s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:34.600666639 +0000 UTC m=+921.492186245" lastFinishedPulling="2025-09-29 12:56:14.167655069 +0000 UTC m=+961.059174675" observedRunningTime="2025-09-29 12:56:15.315065656 +0000 UTC m=+962.206585262" watchObservedRunningTime="2025-09-29 12:56:15.320953926 +0000 UTC m=+962.212473532" Sep 29 12:56:15 crc kubenswrapper[4611]: I0929 12:56:15.335464 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" podStartSLOduration=3.184468843 podStartE2EDuration="44.335444554s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.111998636 +0000 UTC m=+920.003518242" lastFinishedPulling="2025-09-29 12:56:14.262974347 +0000 UTC m=+961.154493953" observedRunningTime="2025-09-29 12:56:15.333092776 +0000 UTC m=+962.224612382" watchObservedRunningTime="2025-09-29 12:56:15.335444554 +0000 UTC m=+962.226964160" Sep 29 12:56:16 crc kubenswrapper[4611]: I0929 12:56:16.292845 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" event={"ID":"5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b","Type":"ContainerStarted","Data":"5d759679e374cfd5d86e584bfb8d8138475b361ad5ecf7760779b3db162b6885"} Sep 29 12:56:16 crc kubenswrapper[4611]: I0929 12:56:16.293495 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:56:16 crc kubenswrapper[4611]: I0929 12:56:16.315806 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" podStartSLOduration=3.153657193 
podStartE2EDuration="45.315788623s" podCreationTimestamp="2025-09-29 12:55:31 +0000 UTC" firstStartedPulling="2025-09-29 12:55:33.154755729 +0000 UTC m=+920.046275335" lastFinishedPulling="2025-09-29 12:56:15.316887159 +0000 UTC m=+962.208406765" observedRunningTime="2025-09-29 12:56:16.309207414 +0000 UTC m=+963.200727020" watchObservedRunningTime="2025-09-29 12:56:16.315788623 +0000 UTC m=+963.207308229" Sep 29 12:56:21 crc kubenswrapper[4611]: I0929 12:56:21.753160 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7fdd8988b7-527vk" Sep 29 12:56:21 crc kubenswrapper[4611]: I0929 12:56:21.812744 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7c68997f6b-thmfm" Sep 29 12:56:22 crc kubenswrapper[4611]: I0929 12:56:22.004941 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-68ccf47b7f-dr6tt" Sep 29 12:56:22 crc kubenswrapper[4611]: I0929 12:56:22.256474 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-cf9757659-9xvq5" Sep 29 12:56:22 crc kubenswrapper[4611]: I0929 12:56:22.469515 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-645f75c974-l5dcb" Sep 29 12:56:22 crc kubenswrapper[4611]: I0929 12:56:22.712312 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5f4f4847c9-tq7mf" Sep 29 12:56:23 crc kubenswrapper[4611]: I0929 12:56:23.906082 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl" Sep 29 12:56:34 crc kubenswrapper[4611]: I0929 12:56:34.628688 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:56:34 crc kubenswrapper[4611]: I0929 12:56:34.629178 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.221854 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c745fd669-szn4p"] Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.223400 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.235908 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.236077 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.236185 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-gxr22" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.236292 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.240183 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c745fd669-szn4p"] Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.331480 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-669d777cff-48x89"] Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.333089 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.338474 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h6dk\" (UniqueName: \"kubernetes.io/projected/9114b313-4447-4262-9ea9-8486cbfcf198-kube-api-access-7h6dk\") pod \"dnsmasq-dns-c745fd669-szn4p\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.338637 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9114b313-4447-4262-9ea9-8486cbfcf198-config\") pod \"dnsmasq-dns-c745fd669-szn4p\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.339241 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.350593 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669d777cff-48x89"] Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.440207 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-config\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.440635 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9j8t\" (UniqueName: \"kubernetes.io/projected/17157226-b9ae-43e0-86be-7be96ceea15a-kube-api-access-n9j8t\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.440693 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9114b313-4447-4262-9ea9-8486cbfcf198-config\") pod \"dnsmasq-dns-c745fd669-szn4p\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 
29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.440748 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-dns-svc\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.440778 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h6dk\" (UniqueName: \"kubernetes.io/projected/9114b313-4447-4262-9ea9-8486cbfcf198-kube-api-access-7h6dk\") pod \"dnsmasq-dns-c745fd669-szn4p\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.441665 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9114b313-4447-4262-9ea9-8486cbfcf198-config\") pod \"dnsmasq-dns-c745fd669-szn4p\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.462726 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h6dk\" (UniqueName: \"kubernetes.io/projected/9114b313-4447-4262-9ea9-8486cbfcf198-kube-api-access-7h6dk\") pod \"dnsmasq-dns-c745fd669-szn4p\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.541705 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-dns-svc\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.541810 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-config\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.541836 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9j8t\" (UniqueName: \"kubernetes.io/projected/17157226-b9ae-43e0-86be-7be96ceea15a-kube-api-access-n9j8t\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.542607 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-dns-svc\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.542675 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-config\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.555880 4611 util.go:30] "No sandbox for pod can be found. 
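Need to start a new one" pod="openstack/dnsmasq-dns-c745fd669-szn4p"

For each dnsmasq pod the reconciler walks the same three steps per volume: VerifyControllerAttachedVolume (desired-state check), MountVolume started, then MountVolume.SetUp succeeded. The config and dns-svc volumes are ConfigMap-backed, while the kube-api-access-* volume is the projected service-account volume (token, kube-root-ca.crt, namespace) injected at admission time rather than written by the operator. Below is a sketch of the two ConfigMap volumes as Go API types; the volume names come from the log, but the backing ConfigMap names ("dns" and "dns-svc", both watched in the cache-population lines above) are an assumption:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Volume names as logged for dnsmasq-dns-c745fd669-szn4p; the backing
	// ConfigMap names are assumed from the caches the kubelet populates above.
	volumes := []corev1.Volume{
		{
			Name: "config",
			VolumeSource: corev1.VolumeSource{
				ConfigMap: &corev1.ConfigMapVolumeSource{
					LocalObjectReference: corev1.LocalObjectReference{Name: "dns"},
				},
			},
		},
		{
			Name: "dns-svc",
			VolumeSource: corev1.VolumeSource{
				ConfigMap: &corev1.ConfigMapVolumeSource{
					LocalObjectReference: corev1.LocalObjectReference{Name: "dns-svc"},
				},
			},
		},
	}
	for _, v := range volumes {
		fmt.Printf("volume %q <- ConfigMap %q\n", v.Name, v.ConfigMap.Name)
	}
}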
Need to start a new one" pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.575690 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9j8t\" (UniqueName: \"kubernetes.io/projected/17157226-b9ae-43e0-86be-7be96ceea15a-kube-api-access-n9j8t\") pod \"dnsmasq-dns-669d777cff-48x89\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:43 crc kubenswrapper[4611]: I0929 12:56:43.646595 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:56:44 crc kubenswrapper[4611]: I0929 12:56:44.177732 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c745fd669-szn4p"] Sep 29 12:56:44 crc kubenswrapper[4611]: I0929 12:56:44.279360 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669d777cff-48x89"] Sep 29 12:56:44 crc kubenswrapper[4611]: I0929 12:56:44.484426 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d777cff-48x89" event={"ID":"17157226-b9ae-43e0-86be-7be96ceea15a","Type":"ContainerStarted","Data":"5ccc3922181de98a550d34ee9df867f0917bf5759fdf00cf9d9344c0687b46bc"} Sep 29 12:56:44 crc kubenswrapper[4611]: I0929 12:56:44.485728 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c745fd669-szn4p" event={"ID":"9114b313-4447-4262-9ea9-8486cbfcf198","Type":"ContainerStarted","Data":"c67dc3d8834d6e4c9a47eff6a509dabaecbcc7aa2c47cce47582bb5f8133ed4f"} Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.448670 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669d777cff-48x89"] Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.477883 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bf8496dff-sb4bg"] Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.482226 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.511428 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bf8496dff-sb4bg"] Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.599468 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-599pz\" (UniqueName: \"kubernetes.io/projected/3368069f-c428-472b-a72c-f38f0aea7b4d-kube-api-access-599pz\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.599906 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-dns-svc\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.600059 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-config\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.710419 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-config\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.710545 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-599pz\" (UniqueName: \"kubernetes.io/projected/3368069f-c428-472b-a72c-f38f0aea7b4d-kube-api-access-599pz\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.710580 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-dns-svc\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.711717 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-config\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.711735 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-dns-svc\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.757907 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-599pz\" (UniqueName: 
\"kubernetes.io/projected/3368069f-c428-472b-a72c-f38f0aea7b4d-kube-api-access-599pz\") pod \"dnsmasq-dns-7bf8496dff-sb4bg\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.817540 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.829223 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c745fd669-szn4p"] Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.872722 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c98cdcf59-n2mtw"] Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.874418 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:46 crc kubenswrapper[4611]: I0929 12:56:46.902951 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c98cdcf59-n2mtw"] Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.039024 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-config\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.043985 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-dns-svc\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.044066 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8xgs\" (UniqueName: \"kubernetes.io/projected/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-kube-api-access-g8xgs\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.145929 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-config\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.146393 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-dns-svc\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.146437 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8xgs\" (UniqueName: \"kubernetes.io/projected/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-kube-api-access-g8xgs\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.147652 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-dns-svc\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.148309 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-config\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.201135 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8xgs\" (UniqueName: \"kubernetes.io/projected/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-kube-api-access-g8xgs\") pod \"dnsmasq-dns-7c98cdcf59-n2mtw\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") " pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.289868 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.345211 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bf8496dff-sb4bg"] Sep 29 12:56:47 crc kubenswrapper[4611]: W0929 12:56:47.393112 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3368069f_c428_472b_a72c_f38f0aea7b4d.slice/crio-0b735b3e7facb7e0a6131321904ee597c6aa60bbb8310380dc359f5f2edb2db7 WatchSource:0}: Error finding container 0b735b3e7facb7e0a6131321904ee597c6aa60bbb8310380dc359f5f2edb2db7: Status 404 returned error can't find the container with id 0b735b3e7facb7e0a6131321904ee597c6aa60bbb8310380dc359f5f2edb2db7 Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.543858 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" event={"ID":"3368069f-c428-472b-a72c-f38f0aea7b4d","Type":"ContainerStarted","Data":"0b735b3e7facb7e0a6131321904ee597c6aa60bbb8310380dc359f5f2edb2db7"} Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.676842 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.678426 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.683907 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.685980 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.686362 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.686583 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.687019 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.687471 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-xm2qj" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.688051 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.690281 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760275 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/eb4ad743-3387-43bc-b15d-e3d4b0825793-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760353 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/eb4ad743-3387-43bc-b15d-e3d4b0825793-pod-info\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760403 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-server-conf\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760435 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760492 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-config-data\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760528 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760561 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760583 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760606 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760669 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.760702 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llr25\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-kube-api-access-llr25\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864026 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864103 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864152 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864172 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864199 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864237 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llr25\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-kube-api-access-llr25\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864259 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/eb4ad743-3387-43bc-b15d-e3d4b0825793-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864313 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/eb4ad743-3387-43bc-b15d-e3d4b0825793-pod-info\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864336 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-server-conf\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864356 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.864387 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-config-data\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.866570 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-config-data\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.873436 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-server-conf\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.873588 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.874119 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.874243 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.881831 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.884676 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c98cdcf59-n2mtw"] Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.889384 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/eb4ad743-3387-43bc-b15d-e3d4b0825793-pod-info\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.889383 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.889676 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/eb4ad743-3387-43bc-b15d-e3d4b0825793-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.893830 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llr25\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-kube-api-access-llr25\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.896795 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:47 crc kubenswrapper[4611]: I0929 12:56:47.915652 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: 
\"eb4ad743-3387-43bc-b15d-e3d4b0825793\") " pod="openstack/rabbitmq-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.023192 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.041713 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.044857 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.051030 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.051304 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.051633 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.051806 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.051962 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.052189 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7v52b" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.052443 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.057411 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170323 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170375 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170545 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170610 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170656 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170691 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170723 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a8501653-441a-4c5f-b098-bc5fb7aeba22-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170813 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170846 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zt98\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-kube-api-access-7zt98\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.170963 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a8501653-441a-4c5f-b098-bc5fb7aeba22-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272046 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a8501653-441a-4c5f-b098-bc5fb7aeba22-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272153 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272197 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272253 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272283 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272309 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272338 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272363 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a8501653-441a-4c5f-b098-bc5fb7aeba22-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272385 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zt98\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-kube-api-access-7zt98\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272406 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.272439 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.273015 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.277714 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a8501653-441a-4c5f-b098-bc5fb7aeba22-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.284274 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.284875 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.285735 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.286014 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.286156 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.286964 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a8501653-441a-4c5f-b098-bc5fb7aeba22-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.287655 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.296245 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.318295 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zt98\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-kube-api-access-7zt98\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.335863 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.384234 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.557560 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" event={"ID":"9ef2ccbb-09cb-4074-858c-2ea164c83fa9","Type":"ContainerStarted","Data":"ea5a9ddc392e4619b4badd9f156ba191ec54dde9621283cf58c590ce7a8f8a21"} Sep 29 12:56:48 crc kubenswrapper[4611]: I0929 12:56:48.694080 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 12:56:49 crc kubenswrapper[4611]: I0929 12:56:49.053486 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 12:56:49 crc kubenswrapper[4611]: I0929 12:56:49.576914 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a8501653-441a-4c5f-b098-bc5fb7aeba22","Type":"ContainerStarted","Data":"a812ce5b0bd22e35fda76bb127803a7de863c94a5d2ab34c362e0138bc12489c"} Sep 29 12:56:49 crc kubenswrapper[4611]: I0929 12:56:49.589888 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"eb4ad743-3387-43bc-b15d-e3d4b0825793","Type":"ContainerStarted","Data":"b9fb1b646267ea31ecfc11c6b369e5c0a74405c367e4ecb2972818f0663580e3"} Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.155475 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.169519 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.169665 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.174225 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.177910 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.180366 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.180577 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-szqz2" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.180785 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.187813 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.212812 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.212876 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.212944 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqzdh\" (UniqueName: \"kubernetes.io/projected/367a7739-cd0c-4a45-b804-1d763d6a55f4-kube-api-access-wqzdh\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.212968 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-secrets\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.212988 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.213010 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-config-data-default\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.213033 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.213062 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-kolla-config\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.213088 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/367a7739-cd0c-4a45-b804-1d763d6a55f4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314224 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314316 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqzdh\" (UniqueName: \"kubernetes.io/projected/367a7739-cd0c-4a45-b804-1d763d6a55f4-kube-api-access-wqzdh\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314359 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314378 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-secrets\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314400 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-config-data-default\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314441 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314466 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-kolla-config\") pod \"openstack-galera-0\" (UID: 
\"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314489 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/367a7739-cd0c-4a45-b804-1d763d6a55f4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314577 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.314928 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.321043 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-kolla-config\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.321113 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/367a7739-cd0c-4a45-b804-1d763d6a55f4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.321472 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-config-data-default\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.321566 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/367a7739-cd0c-4a45-b804-1d763d6a55f4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.326564 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-secrets\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.328281 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.329274 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/367a7739-cd0c-4a45-b804-1d763d6a55f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.338912 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqzdh\" (UniqueName: \"kubernetes.io/projected/367a7739-cd0c-4a45-b804-1d763d6a55f4-kube-api-access-wqzdh\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.342104 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"367a7739-cd0c-4a45-b804-1d763d6a55f4\") " pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.502779 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.588944 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.590737 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.597524 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.597780 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.598964 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-bpgw8" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.599124 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.679138 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726255 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726309 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726351 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726475 4611 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726528 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726552 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726643 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvm6x\" (UniqueName: \"kubernetes.io/projected/46d0113e-4eb9-4b51-981e-744b6dd0842e-kube-api-access-zvm6x\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726723 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.726819 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46d0113e-4eb9-4b51-981e-744b6dd0842e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828063 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828398 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828436 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828459 4611 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828476 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828490 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828519 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvm6x\" (UniqueName: \"kubernetes.io/projected/46d0113e-4eb9-4b51-981e-744b6dd0842e-kube-api-access-zvm6x\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828553 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.828592 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46d0113e-4eb9-4b51-981e-744b6dd0842e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.831095 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46d0113e-4eb9-4b51-981e-744b6dd0842e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.832174 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.839943 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.841019 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.858400 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.859441 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46d0113e-4eb9-4b51-981e-744b6dd0842e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.863259 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.873694 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvm6x\" (UniqueName: \"kubernetes.io/projected/46d0113e-4eb9-4b51-981e-744b6dd0842e-kube-api-access-zvm6x\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.875116 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46d0113e-4eb9-4b51-981e-744b6dd0842e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.909306 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"46d0113e-4eb9-4b51-981e-744b6dd0842e\") " pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:50 crc kubenswrapper[4611]: I0929 12:56:50.940905 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.070679 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.072435 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.074725 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.077964 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-rgj5s" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.083892 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.101475 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.243517 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53d14921-aa05-4a37-acec-35bb89b384fb-config-data\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.243911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m869\" (UniqueName: \"kubernetes.io/projected/53d14921-aa05-4a37-acec-35bb89b384fb-kube-api-access-7m869\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.243962 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d14921-aa05-4a37-acec-35bb89b384fb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.244007 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/53d14921-aa05-4a37-acec-35bb89b384fb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.244068 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d14921-aa05-4a37-acec-35bb89b384fb-kolla-config\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.278071 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.345709 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d14921-aa05-4a37-acec-35bb89b384fb-kolla-config\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.346203 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53d14921-aa05-4a37-acec-35bb89b384fb-config-data\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.346349 4611 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7m869\" (UniqueName: \"kubernetes.io/projected/53d14921-aa05-4a37-acec-35bb89b384fb-kube-api-access-7m869\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.346392 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d14921-aa05-4a37-acec-35bb89b384fb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.346427 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/53d14921-aa05-4a37-acec-35bb89b384fb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.348796 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d14921-aa05-4a37-acec-35bb89b384fb-kolla-config\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.350581 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/53d14921-aa05-4a37-acec-35bb89b384fb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.351489 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53d14921-aa05-4a37-acec-35bb89b384fb-config-data\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.353445 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d14921-aa05-4a37-acec-35bb89b384fb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.481293 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m869\" (UniqueName: \"kubernetes.io/projected/53d14921-aa05-4a37-acec-35bb89b384fb-kube-api-access-7m869\") pod \"memcached-0\" (UID: \"53d14921-aa05-4a37-acec-35bb89b384fb\") " pod="openstack/memcached-0" Sep 29 12:56:51 crc kubenswrapper[4611]: W0929 12:56:51.564315 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod367a7739_cd0c_4a45_b804_1d763d6a55f4.slice/crio-3741355e44037ed76e2efe77e126da74bf7d9041e3d99c5a74479abf9e5536c1 WatchSource:0}: Error finding container 3741355e44037ed76e2efe77e126da74bf7d9041e3d99c5a74479abf9e5536c1: Status 404 returned error can't find the container with id 3741355e44037ed76e2efe77e126da74bf7d9041e3d99c5a74479abf9e5536c1 Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.682488 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.703468 4611 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/openstack-galera-0" event={"ID":"367a7739-cd0c-4a45-b804-1d763d6a55f4","Type":"ContainerStarted","Data":"3741355e44037ed76e2efe77e126da74bf7d9041e3d99c5a74479abf9e5536c1"} Sep 29 12:56:51 crc kubenswrapper[4611]: I0929 12:56:51.719002 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.148541 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.150219 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.154641 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-64hq7" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.158967 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.279250 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9xx7\" (UniqueName: \"kubernetes.io/projected/b739cb68-afe4-46e3-912c-318498feeb54-kube-api-access-g9xx7\") pod \"kube-state-metrics-0\" (UID: \"b739cb68-afe4-46e3-912c-318498feeb54\") " pod="openstack/kube-state-metrics-0" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.380939 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9xx7\" (UniqueName: \"kubernetes.io/projected/b739cb68-afe4-46e3-912c-318498feeb54-kube-api-access-g9xx7\") pod \"kube-state-metrics-0\" (UID: \"b739cb68-afe4-46e3-912c-318498feeb54\") " pod="openstack/kube-state-metrics-0" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.439540 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9xx7\" (UniqueName: \"kubernetes.io/projected/b739cb68-afe4-46e3-912c-318498feeb54-kube-api-access-g9xx7\") pod \"kube-state-metrics-0\" (UID: \"b739cb68-afe4-46e3-912c-318498feeb54\") " pod="openstack/kube-state-metrics-0" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.511026 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.522340 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 12:56:52 crc kubenswrapper[4611]: W0929 12:56:52.628801 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d14921_aa05_4a37_acec_35bb89b384fb.slice/crio-d35fe46f78258a7b7eac8c8451e6e6bc96ec39193ff4ad0be9665570faaddeba WatchSource:0}: Error finding container d35fe46f78258a7b7eac8c8451e6e6bc96ec39193ff4ad0be9665570faaddeba: Status 404 returned error can't find the container with id d35fe46f78258a7b7eac8c8451e6e6bc96ec39193ff4ad0be9665570faaddeba Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.725487 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"46d0113e-4eb9-4b51-981e-744b6dd0842e","Type":"ContainerStarted","Data":"d429b8e66e7aa5223498bc0a471b1f90ee2e8e6671570dc519c6410d79ca05df"} Sep 29 12:56:52 crc kubenswrapper[4611]: I0929 12:56:52.730262 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"53d14921-aa05-4a37-acec-35bb89b384fb","Type":"ContainerStarted","Data":"d35fe46f78258a7b7eac8c8451e6e6bc96ec39193ff4ad0be9665570faaddeba"} Sep 29 12:56:53 crc kubenswrapper[4611]: I0929 12:56:53.319388 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 12:56:53 crc kubenswrapper[4611]: W0929 12:56:53.355330 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb739cb68_afe4_46e3_912c_318498feeb54.slice/crio-7a70be62101e122e7cdb906015a35ed0d3a67c26b1110c88078681c83da6a1e9 WatchSource:0}: Error finding container 7a70be62101e122e7cdb906015a35ed0d3a67c26b1110c88078681c83da6a1e9: Status 404 returned error can't find the container with id 7a70be62101e122e7cdb906015a35ed0d3a67c26b1110c88078681c83da6a1e9 Sep 29 12:56:53 crc kubenswrapper[4611]: I0929 12:56:53.840278 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b739cb68-afe4-46e3-912c-318498feeb54","Type":"ContainerStarted","Data":"7a70be62101e122e7cdb906015a35ed0d3a67c26b1110c88078681c83da6a1e9"} Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.052561 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9jsdt"] Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.055532 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.065954 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-hhdkz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.066248 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.096918 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.099092 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9jsdt"] Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.110242 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-qrlpz"] Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.111875 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.111925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b42caf5f-0509-41a3-ab3c-49b5b2be817e-scripts\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.119342 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-log-ovn\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.119407 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b42caf5f-0509-41a3-ab3c-49b5b2be817e-ovn-controller-tls-certs\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.119497 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42caf5f-0509-41a3-ab3c-49b5b2be817e-combined-ca-bundle\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.119549 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-run-ovn\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.119577 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-run\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.119680 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvfqd\" (UniqueName: \"kubernetes.io/projected/b42caf5f-0509-41a3-ab3c-49b5b2be817e-kube-api-access-wvfqd\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.184851 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qrlpz"] Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221664 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b42caf5f-0509-41a3-ab3c-49b5b2be817e-ovn-controller-tls-certs\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221721 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47b31f8c-66df-4172-8185-abba6357fc20-scripts\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221768 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-log\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221800 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhq5j\" (UniqueName: \"kubernetes.io/projected/47b31f8c-66df-4172-8185-abba6357fc20-kube-api-access-dhq5j\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221822 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-lib\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221838 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42caf5f-0509-41a3-ab3c-49b5b2be817e-combined-ca-bundle\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221862 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-run-ovn\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.221877 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-run\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 
crc kubenswrapper[4611]: I0929 12:56:54.221971 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-etc-ovs\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222036 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvfqd\" (UniqueName: \"kubernetes.io/projected/b42caf5f-0509-41a3-ab3c-49b5b2be817e-kube-api-access-wvfqd\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222101 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-run\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222145 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b42caf5f-0509-41a3-ab3c-49b5b2be817e-scripts\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222212 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-log-ovn\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222411 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-run\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222527 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-run-ovn\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.222780 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b42caf5f-0509-41a3-ab3c-49b5b2be817e-var-log-ovn\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.225176 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b42caf5f-0509-41a3-ab3c-49b5b2be817e-scripts\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.233561 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b42caf5f-0509-41a3-ab3c-49b5b2be817e-combined-ca-bundle\") pod 
\"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.244992 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvfqd\" (UniqueName: \"kubernetes.io/projected/b42caf5f-0509-41a3-ab3c-49b5b2be817e-kube-api-access-wvfqd\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.251190 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b42caf5f-0509-41a3-ab3c-49b5b2be817e-ovn-controller-tls-certs\") pod \"ovn-controller-9jsdt\" (UID: \"b42caf5f-0509-41a3-ab3c-49b5b2be817e\") " pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.330650 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-etc-ovs\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.331253 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-run\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.331393 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47b31f8c-66df-4172-8185-abba6357fc20-scripts\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.331495 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-log\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.331594 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhq5j\" (UniqueName: \"kubernetes.io/projected/47b31f8c-66df-4172-8185-abba6357fc20-kube-api-access-dhq5j\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.331736 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-lib\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.332047 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-lib\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.332228 4611 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-log\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.332839 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-var-run\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.333011 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/47b31f8c-66df-4172-8185-abba6357fc20-etc-ovs\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.338585 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47b31f8c-66df-4172-8185-abba6357fc20-scripts\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.367198 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhq5j\" (UniqueName: \"kubernetes.io/projected/47b31f8c-66df-4172-8185-abba6357fc20-kube-api-access-dhq5j\") pod \"ovn-controller-ovs-qrlpz\" (UID: \"47b31f8c-66df-4172-8185-abba6357fc20\") " pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.430646 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt" Sep 29 12:56:54 crc kubenswrapper[4611]: I0929 12:56:54.460662 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-qrlpz" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.819657 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.821316 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.826081 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-7wgqv" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.826344 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.826498 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.839849 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.880879 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d360e26-9efd-4619-a0fc-77ac5eada7d0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.881281 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d360e26-9efd-4619-a0fc-77ac5eada7d0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.881355 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d360e26-9efd-4619-a0fc-77ac5eada7d0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.881392 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccpmg\" (UniqueName: \"kubernetes.io/projected/9d360e26-9efd-4619-a0fc-77ac5eada7d0-kube-api-access-ccpmg\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.881515 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.983432 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d360e26-9efd-4619-a0fc-77ac5eada7d0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.985421 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d360e26-9efd-4619-a0fc-77ac5eada7d0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.983494 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9d360e26-9efd-4619-a0fc-77ac5eada7d0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.985512 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccpmg\" (UniqueName: \"kubernetes.io/projected/9d360e26-9efd-4619-a0fc-77ac5eada7d0-kube-api-access-ccpmg\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.986199 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.986501 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.986726 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d360e26-9efd-4619-a0fc-77ac5eada7d0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:55 crc kubenswrapper[4611]: I0929 12:56:55.999548 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d360e26-9efd-4619-a0fc-77ac5eada7d0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:56 crc kubenswrapper[4611]: I0929 12:56:56.018067 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d360e26-9efd-4619-a0fc-77ac5eada7d0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:56 crc kubenswrapper[4611]: I0929 12:56:56.039187 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccpmg\" (UniqueName: \"kubernetes.io/projected/9d360e26-9efd-4619-a0fc-77ac5eada7d0-kube-api-access-ccpmg\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:56 crc kubenswrapper[4611]: I0929 12:56:56.089141 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9d360e26-9efd-4619-a0fc-77ac5eada7d0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:56 crc kubenswrapper[4611]: I0929 12:56:56.158072 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 12:56:57 crc kubenswrapper[4611]: I0929 12:56:57.040006 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qrlpz"] Sep 29 12:56:58 crc kubenswrapper[4611]: I0929 12:56:58.119919 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qrlpz" event={"ID":"47b31f8c-66df-4172-8185-abba6357fc20","Type":"ContainerStarted","Data":"acc1c1649d200bc82576343f3cdac46381c9d082ea706a0a143ca56745c3af55"} Sep 29 12:56:58 crc kubenswrapper[4611]: I0929 12:56:58.428806 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9jsdt"] Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.289368 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.290945 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.297147 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-lwj97" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.304062 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.309858 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.342655 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.482665 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e73133c-5010-47af-a2e8-df18d77a3f42-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.482761 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e73133c-5010-47af-a2e8-df18d77a3f42-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.482786 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.482823 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkbjr\" (UniqueName: \"kubernetes.io/projected/6e73133c-5010-47af-a2e8-df18d77a3f42-kube-api-access-lkbjr\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.482989 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e73133c-5010-47af-a2e8-df18d77a3f42-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.585130 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e73133c-5010-47af-a2e8-df18d77a3f42-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.585437 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e73133c-5010-47af-a2e8-df18d77a3f42-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.585499 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e73133c-5010-47af-a2e8-df18d77a3f42-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.585534 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.585585 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkbjr\" (UniqueName: \"kubernetes.io/projected/6e73133c-5010-47af-a2e8-df18d77a3f42-kube-api-access-lkbjr\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.590772 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e73133c-5010-47af-a2e8-df18d77a3f42-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.595490 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e73133c-5010-47af-a2e8-df18d77a3f42-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.595702 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.597047 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e73133c-5010-47af-a2e8-df18d77a3f42-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.639978 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkbjr\" (UniqueName: 
\"kubernetes.io/projected/6e73133c-5010-47af-a2e8-df18d77a3f42-kube-api-access-lkbjr\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.663272 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e73133c-5010-47af-a2e8-df18d77a3f42\") " pod="openstack/ovsdbserver-sb-0" Sep 29 12:56:59 crc kubenswrapper[4611]: I0929 12:56:59.924964 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 12:57:00 crc kubenswrapper[4611]: W0929 12:57:00.525346 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb42caf5f_0509_41a3_ab3c_49b5b2be817e.slice/crio-03d8aaf1a4253be6003f9969dfeb9a3dc01f9a3138ee91cd64c92b89830ee608 WatchSource:0}: Error finding container 03d8aaf1a4253be6003f9969dfeb9a3dc01f9a3138ee91cd64c92b89830ee608: Status 404 returned error can't find the container with id 03d8aaf1a4253be6003f9969dfeb9a3dc01f9a3138ee91cd64c92b89830ee608 Sep 29 12:57:01 crc kubenswrapper[4611]: I0929 12:57:01.146240 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 12:57:01 crc kubenswrapper[4611]: I0929 12:57:01.156706 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt" event={"ID":"b42caf5f-0509-41a3-ab3c-49b5b2be817e","Type":"ContainerStarted","Data":"03d8aaf1a4253be6003f9969dfeb9a3dc01f9a3138ee91cd64c92b89830ee608"} Sep 29 12:57:01 crc kubenswrapper[4611]: W0929 12:57:01.238913 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d360e26_9efd_4619_a0fc_77ac5eada7d0.slice/crio-b6485e2111c1f40a0645174f00bb30962e675adbe62fbd1bdf384d5e9f75226d WatchSource:0}: Error finding container b6485e2111c1f40a0645174f00bb30962e675adbe62fbd1bdf384d5e9f75226d: Status 404 returned error can't find the container with id b6485e2111c1f40a0645174f00bb30962e675adbe62fbd1bdf384d5e9f75226d Sep 29 12:57:02 crc kubenswrapper[4611]: I0929 12:57:02.166810 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9d360e26-9efd-4619-a0fc-77ac5eada7d0","Type":"ContainerStarted","Data":"b6485e2111c1f40a0645174f00bb30962e675adbe62fbd1bdf384d5e9f75226d"} Sep 29 12:57:02 crc kubenswrapper[4611]: I0929 12:57:02.571565 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 12:57:04 crc kubenswrapper[4611]: I0929 12:57:04.628806 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:57:04 crc kubenswrapper[4611]: I0929 12:57:04.629157 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:57:04 crc kubenswrapper[4611]: I0929 12:57:04.629199 4611 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 12:57:04 crc kubenswrapper[4611]: I0929 12:57:04.629830 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5dabec6d4a1f56079556aa2416ffa504eeb7f0aa06b802b890dac62cf28cc40d"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 12:57:04 crc kubenswrapper[4611]: I0929 12:57:04.629891 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://5dabec6d4a1f56079556aa2416ffa504eeb7f0aa06b802b890dac62cf28cc40d" gracePeriod=600 Sep 29 12:57:05 crc kubenswrapper[4611]: I0929 12:57:05.207062 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="5dabec6d4a1f56079556aa2416ffa504eeb7f0aa06b802b890dac62cf28cc40d" exitCode=0 Sep 29 12:57:05 crc kubenswrapper[4611]: I0929 12:57:05.207108 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"5dabec6d4a1f56079556aa2416ffa504eeb7f0aa06b802b890dac62cf28cc40d"} Sep 29 12:57:05 crc kubenswrapper[4611]: I0929 12:57:05.207152 4611 scope.go:117] "RemoveContainer" containerID="a017a7df5b38d210396454e3d5320e7848368cef39cc3b4e52346ca0e1c69ea7" Sep 29 12:57:09 crc kubenswrapper[4611]: W0929 12:57:09.981105 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e73133c_5010_47af_a2e8_df18d77a3f42.slice/crio-3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99 WatchSource:0}: Error finding container 3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99: Status 404 returned error can't find the container with id 3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99 Sep 29 12:57:10 crc kubenswrapper[4611]: I0929 12:57:10.252904 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e73133c-5010-47af-a2e8-df18d77a3f42","Type":"ContainerStarted","Data":"3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99"} Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.748290 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.748860 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.748993 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --no-daemon --log-debug --bind-interfaces --listen-address=$(POD_IP) 
Sep 29 12:57:09 crc kubenswrapper[4611]: W0929 12:57:09.981105 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e73133c_5010_47af_a2e8_df18d77a3f42.slice/crio-3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99 WatchSource:0}: Error finding container 3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99: Status 404 returned error can't find the container with id 3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99
Sep 29 12:57:10 crc kubenswrapper[4611]: I0929 12:57:10.252904 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e73133c-5010-47af-a2e8-df18d77a3f42","Type":"ContainerStarted","Data":"3ba9a2cf5aff8b2f249ba0c50775e8b38925798e2c7b82c5f9d66f6f1789ea99"}
Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.748290 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested"
Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.748860 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested"
Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.748993 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --no-daemon --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-599pz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7bf8496dff-sb4bg_openstack(3368069f-c428-472b-a72c-f38f0aea7b4d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 29 12:57:19 crc kubenswrapper[4611]: E0929 12:57:19.750299 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" podUID="3368069f-c428-472b-a72c-f38f0aea7b4d"
Sep 29 12:57:20 crc kubenswrapper[4611]: E0929 12:57:20.334904 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested\\\"\"" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" podUID="3368069f-c428-472b-a72c-f38f0aea7b4d"
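The &Container{} dump above records the failing init container's full spec. Its command is worth pulling out: the trailing --test flag makes dnsmasq parse the rendered configuration and exit without serving, which is what makes it suitable as an init-container config check. Reconstructed verbatim from the Args field (in the pod it is wrapped in /bin/bash -c, with POD_IP injected from status.podIP):

# dnsmasq config validation as run by the init container; --test parses the config and exits.
dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts \
  --keep-in-foreground --no-daemon --log-debug --bind-interfaces \
  --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts \
  --domain-needed --no-resolv --bogus-priv --log-queries --test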
&Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current-tested,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7zt98,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(a8501653-441a-4c5f-b098-bc5fb7aeba22): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:20 crc kubenswrapper[4611]: E0929 12:57:20.944216 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" Sep 29 12:57:20 crc kubenswrapper[4611]: E0929 12:57:20.945323 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:20 crc kubenswrapper[4611]: E0929 12:57:20.945359 4611 kuberuntime_image.go:55] 
"Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:20 crc kubenswrapper[4611]: E0929 12:57:20.945452 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --no-daemon --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n9j8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-669d777cff-48x89_openstack(17157226-b9ae-43e0-86be-7be96ceea15a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:20 crc kubenswrapper[4611]: E0929 12:57:20.946693 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-669d777cff-48x89" podUID="17157226-b9ae-43e0-86be-7be96ceea15a" Sep 29 12:57:21 crc kubenswrapper[4611]: E0929 12:57:21.348237 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current-tested\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" Sep 29 12:57:22 crc kubenswrapper[4611]: E0929 12:57:22.471448 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: 
context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:22 crc kubenswrapper[4611]: E0929 12:57:22.471786 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:22 crc kubenswrapper[4611]: E0929 12:57:22.471906 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --no-daemon --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8xgs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7c98cdcf59-n2mtw_openstack(9ef2ccbb-09cb-4074-858c-2ea164c83fa9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:22 crc kubenswrapper[4611]: E0929 12:57:22.473224 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" podUID="9ef2ccbb-09cb-4074-858c-2ea164c83fa9" Sep 29 12:57:23 crc kubenswrapper[4611]: E0929 12:57:23.371384 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested\\\"\"" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" 
podUID="9ef2ccbb-09cb-4074-858c-2ea164c83fa9" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.138308 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.138699 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.138376 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.138827 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.138837 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current-tested,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wqzdh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,Env
From:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(367a7739-cd0c-4a45-b804-1d763d6a55f4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.138969 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current-tested,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --no-daemon --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7h6dk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-c745fd669-szn4p_openstack(9114b313-4447-4262-9ea9-8486cbfcf198): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.140054 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-c745fd669-szn4p" podUID="9114b313-4447-4262-9ea9-8486cbfcf198" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.140490 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="367a7739-cd0c-4a45-b804-1d763d6a55f4" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.369602 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling 
image \\\"quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current-tested\\\"\"" pod="openstack/openstack-galera-0" podUID="367a7739-cd0c-4a45-b804-1d763d6a55f4" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.749323 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.749381 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.749511 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current-tested,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f6h668h595h7bh656hcfh64ch647h647h5f8h546h5f9hb5h575h5f9h646h5cbhd8h55h568h55bhbh687h56fh557h557h88hdfh546hc4h547hbfq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dhq5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-qrlpz_openstack(47b31f8c-66df-4172-8185-abba6357fc20): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.750852 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-qrlpz" podUID="47b31f8c-66df-4172-8185-abba6357fc20" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.811848 4611 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.838946 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9j8t\" (UniqueName: \"kubernetes.io/projected/17157226-b9ae-43e0-86be-7be96ceea15a-kube-api-access-n9j8t\") pod \"17157226-b9ae-43e0-86be-7be96ceea15a\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.839555 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-dns-svc\") pod \"17157226-b9ae-43e0-86be-7be96ceea15a\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.839688 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-config\") pod \"17157226-b9ae-43e0-86be-7be96ceea15a\" (UID: \"17157226-b9ae-43e0-86be-7be96ceea15a\") " Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.840079 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "17157226-b9ae-43e0-86be-7be96ceea15a" (UID: "17157226-b9ae-43e0-86be-7be96ceea15a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.840192 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-config" (OuterVolumeSpecName: "config") pod "17157226-b9ae-43e0-86be-7be96ceea15a" (UID: "17157226-b9ae-43e0-86be-7be96ceea15a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.842426 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.842508 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17157226-b9ae-43e0-86be-7be96ceea15a-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.845948 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17157226-b9ae-43e0-86be-7be96ceea15a-kube-api-access-n9j8t" (OuterVolumeSpecName: "kube-api-access-n9j8t") pod "17157226-b9ae-43e0-86be-7be96ceea15a" (UID: "17157226-b9ae-43e0-86be-7be96ceea15a"). InnerVolumeSpecName "kube-api-access-n9j8t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:57:24 crc kubenswrapper[4611]: I0929 12:57:24.944507 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9j8t\" (UniqueName: \"kubernetes.io/projected/17157226-b9ae-43e0-86be-7be96ceea15a-kube-api-access-n9j8t\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.993531 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.993576 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current-tested" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.993761 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current-tested,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key --ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f6h668h595h7bh656hcfh64ch647h647h5f8h546h5f9hb5h575h5f9h646h5cbhd8h55h568h55bhbh687h56fh557h557h88hdfh546hc4h547hbfq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wvfqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_
controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-9jsdt_openstack(b42caf5f-0509-41a3-ab3c-49b5b2be817e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:57:24 crc kubenswrapper[4611]: E0929 12:57:24.994964 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-9jsdt" podUID="b42caf5f-0509-41a3-ab3c-49b5b2be817e" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.189321 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-nb-db-server:current-tested" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.189647 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-nb-db-server:current-tested" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.189766 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ovsdbserver-nb,Image:quay.rdoproject.org/podified-master-centos10/openstack-ovn-nb-db-server:current-tested,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n545h665h87hf5h89hfdhf6h697h65dh676h57dh654h58fh5d8h579h5c7h57bh569h58ch64dh7dh7dhc7h5cdh657h8dh689hbbhbfh584h67bh7q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-nb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ccpmg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-nb-0_openstack(9d360e26-9efd-4619-a0fc-77ac5eada7d0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.191535 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-nb-0" podUID="9d360e26-9efd-4619-a0fc-77ac5eada7d0" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.268947 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.350103 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9114b313-4447-4262-9ea9-8486cbfcf198-config\") pod \"9114b313-4447-4262-9ea9-8486cbfcf198\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.350242 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h6dk\" (UniqueName: \"kubernetes.io/projected/9114b313-4447-4262-9ea9-8486cbfcf198-kube-api-access-7h6dk\") pod \"9114b313-4447-4262-9ea9-8486cbfcf198\" (UID: \"9114b313-4447-4262-9ea9-8486cbfcf198\") " Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.351163 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9114b313-4447-4262-9ea9-8486cbfcf198-config" (OuterVolumeSpecName: "config") pod "9114b313-4447-4262-9ea9-8486cbfcf198" (UID: "9114b313-4447-4262-9ea9-8486cbfcf198"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.354327 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9114b313-4447-4262-9ea9-8486cbfcf198-kube-api-access-7h6dk" (OuterVolumeSpecName: "kube-api-access-7h6dk") pod "9114b313-4447-4262-9ea9-8486cbfcf198" (UID: "9114b313-4447-4262-9ea9-8486cbfcf198"). InnerVolumeSpecName "kube-api-access-7h6dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.378200 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d777cff-48x89" event={"ID":"17157226-b9ae-43e0-86be-7be96ceea15a","Type":"ContainerDied","Data":"5ccc3922181de98a550d34ee9df867f0917bf5759fdf00cf9d9344c0687b46bc"} Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.378254 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d777cff-48x89" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.382082 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c745fd669-szn4p" event={"ID":"9114b313-4447-4262-9ea9-8486cbfcf198","Type":"ContainerDied","Data":"c67dc3d8834d6e4c9a47eff6a509dabaecbcc7aa2c47cce47582bb5f8133ed4f"} Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.382162 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c745fd669-szn4p" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.384896 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current-tested\\\"\"" pod="openstack/ovn-controller-ovs-qrlpz" podUID="47b31f8c-66df-4172-8185-abba6357fc20" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.385118 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-nb-db-server:current-tested\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="9d360e26-9efd-4619-a0fc-77ac5eada7d0" Sep 29 12:57:25 crc kubenswrapper[4611]: E0929 12:57:25.385887 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current-tested\\\"\"" pod="openstack/ovn-controller-9jsdt" podUID="b42caf5f-0509-41a3-ab3c-49b5b2be817e" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.451730 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h6dk\" (UniqueName: \"kubernetes.io/projected/9114b313-4447-4262-9ea9-8486cbfcf198-kube-api-access-7h6dk\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.451761 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9114b313-4447-4262-9ea9-8486cbfcf198-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.522002 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c745fd669-szn4p"] Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.529576 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c745fd669-szn4p"] Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.569653 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669d777cff-48x89"] Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.574951 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-669d777cff-48x89"] Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.804178 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17157226-b9ae-43e0-86be-7be96ceea15a" path="/var/lib/kubelet/pods/17157226-b9ae-43e0-86be-7be96ceea15a/volumes" Sep 29 12:57:25 crc kubenswrapper[4611]: I0929 12:57:25.804588 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9114b313-4447-4262-9ea9-8486cbfcf198" path="/var/lib/kubelet/pods/9114b313-4447-4262-9ea9-8486cbfcf198/volumes" Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.390137 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"53d14921-aa05-4a37-acec-35bb89b384fb","Type":"ContainerStarted","Data":"ed001b50668f4beb457afa23cbb4de6f0ab3785e89370e09ec1d51c996a90bd0"} Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.390506 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.391987 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"6e73133c-5010-47af-a2e8-df18d77a3f42","Type":"ContainerStarted","Data":"6f24b7505ae1279f3a51513eb461e361bbf5ec6f07f3771ce9b63e39b556c1f2"} Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.393474 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"eb4ad743-3387-43bc-b15d-e3d4b0825793","Type":"ContainerStarted","Data":"be1f39ea68722bb92f5a313b14311073c5463b6ee64113518c5a704781fb9c26"} Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.395726 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78"} Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.396926 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b739cb68-afe4-46e3-912c-318498feeb54","Type":"ContainerStarted","Data":"f76e49e07d3dc77213c48fa8bef58f0164e66dbb0757d08bded4e27a0d75fbf6"} Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.397053 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.397966 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"46d0113e-4eb9-4b51-981e-744b6dd0842e","Type":"ContainerStarted","Data":"a7ba281eee43e16d8a51e606f407cea644a574ddde2575b77063b8c66ea272b2"} Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.411212 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.2883548559999998 podStartE2EDuration="35.411196989s" podCreationTimestamp="2025-09-29 12:56:51 +0000 UTC" firstStartedPulling="2025-09-29 12:56:52.718605056 +0000 UTC m=+999.610124662" lastFinishedPulling="2025-09-29 12:57:25.841447189 +0000 UTC m=+1032.732966795" observedRunningTime="2025-09-29 12:57:26.406664179 +0000 UTC m=+1033.298183785" watchObservedRunningTime="2025-09-29 12:57:26.411196989 +0000 UTC m=+1033.302716595" Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.432818 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=5.492716364 podStartE2EDuration="34.432794002s" podCreationTimestamp="2025-09-29 12:56:52 +0000 UTC" firstStartedPulling="2025-09-29 12:56:53.359938391 +0000 UTC m=+1000.251457997" lastFinishedPulling="2025-09-29 12:57:22.300016029 +0000 UTC m=+1029.191535635" observedRunningTime="2025-09-29 12:57:26.42854445 +0000 UTC m=+1033.320064076" watchObservedRunningTime="2025-09-29 12:57:26.432794002 +0000 UTC m=+1033.324313608" Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.532417 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=12.675927068 podStartE2EDuration="28.532397365s" podCreationTimestamp="2025-09-29 12:56:58 +0000 UTC" firstStartedPulling="2025-09-29 12:57:09.985024713 +0000 UTC m=+1016.876544319" lastFinishedPulling="2025-09-29 12:57:25.84149501 +0000 UTC m=+1032.733014616" observedRunningTime="2025-09-29 12:57:26.528927465 +0000 UTC m=+1033.420447081" watchObservedRunningTime="2025-09-29 12:57:26.532397365 +0000 UTC m=+1033.423916971" Sep 29 12:57:26 crc kubenswrapper[4611]: I0929 12:57:26.925048 4611 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 12:57:29 crc kubenswrapper[4611]: I0929 12:57:29.925343 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 12:57:29 crc kubenswrapper[4611]: I0929 12:57:29.965672 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 12:57:30 crc kubenswrapper[4611]: I0929 12:57:30.427738 4611 generic.go:334] "Generic (PLEG): container finished" podID="46d0113e-4eb9-4b51-981e-744b6dd0842e" containerID="a7ba281eee43e16d8a51e606f407cea644a574ddde2575b77063b8c66ea272b2" exitCode=0 Sep 29 12:57:30 crc kubenswrapper[4611]: I0929 12:57:30.427827 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"46d0113e-4eb9-4b51-981e-744b6dd0842e","Type":"ContainerDied","Data":"a7ba281eee43e16d8a51e606f407cea644a574ddde2575b77063b8c66ea272b2"} Sep 29 12:57:31 crc kubenswrapper[4611]: I0929 12:57:31.436465 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"46d0113e-4eb9-4b51-981e-744b6dd0842e","Type":"ContainerStarted","Data":"f70496c25756298408957b61a2ae99c0d8963ca7265d6524fc66ffaced347343"} Sep 29 12:57:31 crc kubenswrapper[4611]: I0929 12:57:31.455926 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.596107175 podStartE2EDuration="42.45590536s" podCreationTimestamp="2025-09-29 12:56:49 +0000 UTC" firstStartedPulling="2025-09-29 12:56:51.874054081 +0000 UTC m=+998.765573687" lastFinishedPulling="2025-09-29 12:57:25.733852266 +0000 UTC m=+1032.625371872" observedRunningTime="2025-09-29 12:57:31.454234142 +0000 UTC m=+1038.345753748" watchObservedRunningTime="2025-09-29 12:57:31.45590536 +0000 UTC m=+1038.347424996" Sep 29 12:57:31 crc kubenswrapper[4611]: I0929 12:57:31.744770 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.527284 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.618924 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bf8496dff-sb4bg"] Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.694763 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bff7899c9-b58wp"] Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.696125 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.736296 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bff7899c9-b58wp"] Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.800861 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjtkt\" (UniqueName: \"kubernetes.io/projected/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-kube-api-access-bjtkt\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.801005 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-config\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.801067 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-dns-svc\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.903140 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-config\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.903211 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-dns-svc\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.903297 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjtkt\" (UniqueName: \"kubernetes.io/projected/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-kube-api-access-bjtkt\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.904221 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-config\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.904245 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-dns-svc\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:32 crc kubenswrapper[4611]: I0929 12:57:32.955308 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjtkt\" (UniqueName: 
\"kubernetes.io/projected/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-kube-api-access-bjtkt\") pod \"dnsmasq-dns-bff7899c9-b58wp\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") " pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.028158 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.107369 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.207448 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-dns-svc\") pod \"3368069f-c428-472b-a72c-f38f0aea7b4d\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.208003 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-config\") pod \"3368069f-c428-472b-a72c-f38f0aea7b4d\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.208099 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-599pz\" (UniqueName: \"kubernetes.io/projected/3368069f-c428-472b-a72c-f38f0aea7b4d-kube-api-access-599pz\") pod \"3368069f-c428-472b-a72c-f38f0aea7b4d\" (UID: \"3368069f-c428-472b-a72c-f38f0aea7b4d\") " Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.208580 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-config" (OuterVolumeSpecName: "config") pod "3368069f-c428-472b-a72c-f38f0aea7b4d" (UID: "3368069f-c428-472b-a72c-f38f0aea7b4d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.208603 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3368069f-c428-472b-a72c-f38f0aea7b4d" (UID: "3368069f-c428-472b-a72c-f38f0aea7b4d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.208919 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.208931 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3368069f-c428-472b-a72c-f38f0aea7b4d-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.229859 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3368069f-c428-472b-a72c-f38f0aea7b4d-kube-api-access-599pz" (OuterVolumeSpecName: "kube-api-access-599pz") pod "3368069f-c428-472b-a72c-f38f0aea7b4d" (UID: "3368069f-c428-472b-a72c-f38f0aea7b4d"). InnerVolumeSpecName "kube-api-access-599pz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.310060 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-599pz\" (UniqueName: \"kubernetes.io/projected/3368069f-c428-472b-a72c-f38f0aea7b4d-kube-api-access-599pz\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.327754 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bff7899c9-b58wp"] Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.460846 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" event={"ID":"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1","Type":"ContainerStarted","Data":"7c3835e646b84159d369f598d56cb68847a44b82ae22783fc8ab727344ce3cd1"} Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.462127 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" event={"ID":"3368069f-c428-472b-a72c-f38f0aea7b4d","Type":"ContainerDied","Data":"0b735b3e7facb7e0a6131321904ee597c6aa60bbb8310380dc359f5f2edb2db7"} Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.462187 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf8496dff-sb4bg" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.515620 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bf8496dff-sb4bg"] Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.526520 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bf8496dff-sb4bg"] Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.715569 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.721899 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.723886 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-jxdxq" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.725590 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.730080 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.730132 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.746735 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3368069f-c428-472b-a72c-f38f0aea7b4d" path="/var/lib/kubelet/pods/3368069f-c428-472b-a72c-f38f0aea7b4d/volumes" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.757171 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.919899 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/458f3780-8709-4a3c-ac9e-9a1b5ced2172-cache\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.919950 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c2k5\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-kube-api-access-7c2k5\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.920128 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/458f3780-8709-4a3c-ac9e-9a1b5ced2172-lock\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.920159 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:33 crc kubenswrapper[4611]: I0929 12:57:33.920220 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.021425 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/458f3780-8709-4a3c-ac9e-9a1b5ced2172-lock\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.021474 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.021525 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.021547 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/458f3780-8709-4a3c-ac9e-9a1b5ced2172-cache\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.021565 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c2k5\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-kube-api-access-7c2k5\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.022701 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/458f3780-8709-4a3c-ac9e-9a1b5ced2172-lock\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.023540 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/458f3780-8709-4a3c-ac9e-9a1b5ced2172-cache\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.023584 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: E0929 12:57:34.023682 4611 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 12:57:34 crc kubenswrapper[4611]: E0929 12:57:34.023699 4611 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 12:57:34 crc kubenswrapper[4611]: E0929 12:57:34.024065 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift podName:458f3780-8709-4a3c-ac9e-9a1b5ced2172 nodeName:}" failed. No retries permitted until 2025-09-29 12:57:34.524036332 +0000 UTC m=+1041.415555968 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift") pod "swift-storage-0" (UID: "458f3780-8709-4a3c-ac9e-9a1b5ced2172") : configmap "swift-ring-files" not found Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.044261 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c2k5\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-kube-api-access-7c2k5\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.046880 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.295849 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-z9w9b"] Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.300973 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.303636 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.303956 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.304476 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.316070 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-z9w9b"] Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428002 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-ring-data-devices\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428093 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-scripts\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428131 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-dispersionconf\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428184 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv7qh\" (UniqueName: \"kubernetes.io/projected/b548cb20-950c-4d83-b7e1-c910375a4bf0-kube-api-access-cv7qh\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " 
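The etc-swift mount for swift-storage-0 fails above because its projected volume sources the ConfigMap "swift-ring-files", which does not exist yet; the swift-ring-rebalance-z9w9b job being scheduled in the same entries is what will eventually publish it. A hedged client-go sketch for confirming the missing dependency from outside the node follows; the namespace and ConfigMap name are taken from the log, everything else (kubeconfig loading, program structure) is illustrative.

    package main

    import (
    	"context"
    	"fmt"

    	apierrors "k8s.io/apimachinery/pkg/api/errors"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)
    	_, err = cs.CoreV1().ConfigMaps("openstack").Get(
    		context.TODO(), "swift-ring-files", metav1.GetOptions{})
    	switch {
    	case apierrors.IsNotFound(err):
    		// Matches the kubelet error above: the mount is retried with
    		// backoff until the rebalance job publishes the ConfigMap.
    		fmt.Println("swift-ring-files not published yet")
    	case err != nil:
    		panic(err)
    	default:
    		fmt.Println("swift-ring-files exists; the mount should succeed on retry")
    	}
    }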
pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428213 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b548cb20-950c-4d83-b7e1-c910375a4bf0-etc-swift\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428257 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-combined-ca-bundle\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.428387 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-swiftconf\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.481636 4611 generic.go:334] "Generic (PLEG): container finished" podID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerID="c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370" exitCode=0 Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.481688 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" event={"ID":"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1","Type":"ContainerDied","Data":"c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370"} Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.538889 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539281 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-ring-data-devices\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539323 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-scripts\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539355 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-dispersionconf\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539398 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv7qh\" (UniqueName: 
\"kubernetes.io/projected/b548cb20-950c-4d83-b7e1-c910375a4bf0-kube-api-access-cv7qh\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539416 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b548cb20-950c-4d83-b7e1-c910375a4bf0-etc-swift\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539465 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-combined-ca-bundle\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: E0929 12:57:34.539481 4611 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 12:57:34 crc kubenswrapper[4611]: E0929 12:57:34.539533 4611 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.539540 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-swiftconf\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: E0929 12:57:34.539591 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift podName:458f3780-8709-4a3c-ac9e-9a1b5ced2172 nodeName:}" failed. No retries permitted until 2025-09-29 12:57:35.53957177 +0000 UTC m=+1042.431091376 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift") pod "swift-storage-0" (UID: "458f3780-8709-4a3c-ac9e-9a1b5ced2172") : configmap "swift-ring-files" not found Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.547250 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b548cb20-950c-4d83-b7e1-c910375a4bf0-etc-swift\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.548096 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-ring-data-devices\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.550234 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-scripts\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.561754 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-combined-ca-bundle\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.561802 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-swiftconf\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.571860 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-dispersionconf\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.574778 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv7qh\" (UniqueName: \"kubernetes.io/projected/b548cb20-950c-4d83-b7e1-c910375a4bf0-kube-api-access-cv7qh\") pod \"swift-ring-rebalance-z9w9b\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.617751 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:34 crc kubenswrapper[4611]: I0929 12:57:34.965233 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.104350 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-z9w9b"] Sep 29 12:57:35 crc kubenswrapper[4611]: W0929 12:57:35.107992 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb548cb20_950c_4d83_b7e1_c910375a4bf0.slice/crio-1b0eb9505080b26c39cc31b3aa5193eed9261a99c560041739dba5f9e6cf5d70 WatchSource:0}: Error finding container 1b0eb9505080b26c39cc31b3aa5193eed9261a99c560041739dba5f9e6cf5d70: Status 404 returned error can't find the container with id 1b0eb9505080b26c39cc31b3aa5193eed9261a99c560041739dba5f9e6cf5d70 Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.269335 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c98cdcf59-n2mtw"] Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.320449 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d967846b5-rssfj"] Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.322691 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.325616 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.354559 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d967846b5-rssfj"] Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.461211 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2xb7\" (UniqueName: \"kubernetes.io/projected/58db2db0-e626-4c37-8b9d-b27d39e47e8a-kube-api-access-x2xb7\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.461287 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-config\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.461316 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-dns-svc\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.461401 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-ovsdbserver-sb\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.518297 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" 
event={"ID":"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1","Type":"ContainerStarted","Data":"d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4"} Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.519830 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.522508 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z9w9b" event={"ID":"b548cb20-950c-4d83-b7e1-c910375a4bf0","Type":"ContainerStarted","Data":"1b0eb9505080b26c39cc31b3aa5193eed9261a99c560041739dba5f9e6cf5d70"} Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.537172 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" podStartSLOduration=3.271178297 podStartE2EDuration="3.537147027s" podCreationTimestamp="2025-09-29 12:57:32 +0000 UTC" firstStartedPulling="2025-09-29 12:57:33.332823589 +0000 UTC m=+1040.224343195" lastFinishedPulling="2025-09-29 12:57:33.598792319 +0000 UTC m=+1040.490311925" observedRunningTime="2025-09-29 12:57:35.533097391 +0000 UTC m=+1042.424616997" watchObservedRunningTime="2025-09-29 12:57:35.537147027 +0000 UTC m=+1042.428666633" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.563037 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2xb7\" (UniqueName: \"kubernetes.io/projected/58db2db0-e626-4c37-8b9d-b27d39e47e8a-kube-api-access-x2xb7\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.563097 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.563129 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-config\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.563155 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-dns-svc\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.563253 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-ovsdbserver-sb\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.564815 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-config\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:35 crc kubenswrapper[4611]: 
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.564820 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-ovsdbserver-sb\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj"
Sep 29 12:57:35 crc kubenswrapper[4611]: E0929 12:57:35.565030 4611 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 12:57:35 crc kubenswrapper[4611]: E0929 12:57:35.565051 4611 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 12:57:35 crc kubenswrapper[4611]: E0929 12:57:35.565103 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift podName:458f3780-8709-4a3c-ac9e-9a1b5ced2172 nodeName:}" failed. No retries permitted until 2025-09-29 12:57:37.565086703 +0000 UTC m=+1044.456606359 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift") pod "swift-storage-0" (UID: "458f3780-8709-4a3c-ac9e-9a1b5ced2172") : configmap "swift-ring-files" not found
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.566353 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-dns-svc\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj"
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.581935 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2xb7\" (UniqueName: \"kubernetes.io/projected/58db2db0-e626-4c37-8b9d-b27d39e47e8a-kube-api-access-x2xb7\") pod \"dnsmasq-dns-d967846b5-rssfj\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " pod="openstack/dnsmasq-dns-d967846b5-rssfj"
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.652680 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw"
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.695599 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d967846b5-rssfj"
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.765910 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-config\") pod \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") "
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.766238 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-dns-svc\") pod \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") "
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.766357 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8xgs\" (UniqueName: \"kubernetes.io/projected/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-kube-api-access-g8xgs\") pod \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\" (UID: \"9ef2ccbb-09cb-4074-858c-2ea164c83fa9\") "
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.767931 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-config" (OuterVolumeSpecName: "config") pod "9ef2ccbb-09cb-4074-858c-2ea164c83fa9" (UID: "9ef2ccbb-09cb-4074-858c-2ea164c83fa9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.767963 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9ef2ccbb-09cb-4074-858c-2ea164c83fa9" (UID: "9ef2ccbb-09cb-4074-858c-2ea164c83fa9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.770973 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-kube-api-access-g8xgs" (OuterVolumeSpecName: "kube-api-access-g8xgs") pod "9ef2ccbb-09cb-4074-858c-2ea164c83fa9" (UID: "9ef2ccbb-09cb-4074-858c-2ea164c83fa9"). InnerVolumeSpecName "kube-api-access-g8xgs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.868334 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-config\") on node \"crc\" DevicePath \"\""
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.868356 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 12:57:35 crc kubenswrapper[4611]: I0929 12:57:35.868367 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8xgs\" (UniqueName: \"kubernetes.io/projected/9ef2ccbb-09cb-4074-858c-2ea164c83fa9-kube-api-access-g8xgs\") on node \"crc\" DevicePath \"\""
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.145976 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d967846b5-rssfj"]
Sep 29 12:57:36 crc kubenswrapper[4611]: W0929 12:57:36.154370 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58db2db0_e626_4c37_8b9d_b27d39e47e8a.slice/crio-48b7df1b06025cbfea77af90b2235b0a9ba12873223a4584d312adee50a61cea WatchSource:0}: Error finding container 48b7df1b06025cbfea77af90b2235b0a9ba12873223a4584d312adee50a61cea: Status 404 returned error can't find the container with id 48b7df1b06025cbfea77af90b2235b0a9ba12873223a4584d312adee50a61cea
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.538121 4611 generic.go:334] "Generic (PLEG): container finished" podID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerID="28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130" exitCode=0
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.538193 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d967846b5-rssfj" event={"ID":"58db2db0-e626-4c37-8b9d-b27d39e47e8a","Type":"ContainerDied","Data":"28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130"}
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.538224 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d967846b5-rssfj" event={"ID":"58db2db0-e626-4c37-8b9d-b27d39e47e8a","Type":"ContainerStarted","Data":"48b7df1b06025cbfea77af90b2235b0a9ba12873223a4584d312adee50a61cea"}
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.540950 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw"
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.540995 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c98cdcf59-n2mtw" event={"ID":"9ef2ccbb-09cb-4074-858c-2ea164c83fa9","Type":"ContainerDied","Data":"ea5a9ddc392e4619b4badd9f156ba191ec54dde9621283cf58c590ce7a8f8a21"}
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.638755 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c98cdcf59-n2mtw"]
Sep 29 12:57:36 crc kubenswrapper[4611]: I0929 12:57:36.644528 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c98cdcf59-n2mtw"]
Sep 29 12:57:37 crc kubenswrapper[4611]: I0929 12:57:37.571524 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d967846b5-rssfj" event={"ID":"58db2db0-e626-4c37-8b9d-b27d39e47e8a","Type":"ContainerStarted","Data":"5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a"}
Sep 29 12:57:37 crc kubenswrapper[4611]: I0929 12:57:37.571882 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d967846b5-rssfj"
Sep 29 12:57:37 crc kubenswrapper[4611]: I0929 12:57:37.613111 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d967846b5-rssfj" podStartSLOduration=2.613091185 podStartE2EDuration="2.613091185s" podCreationTimestamp="2025-09-29 12:57:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:57:37.61154773 +0000 UTC m=+1044.503067346" watchObservedRunningTime="2025-09-29 12:57:37.613091185 +0000 UTC m=+1044.504610781"
Sep 29 12:57:37 crc kubenswrapper[4611]: I0929 12:57:37.619348 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0"
Sep 29 12:57:37 crc kubenswrapper[4611]: E0929 12:57:37.619717 4611 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 12:57:37 crc kubenswrapper[4611]: E0929 12:57:37.619740 4611 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 12:57:37 crc kubenswrapper[4611]: E0929 12:57:37.619791 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift podName:458f3780-8709-4a3c-ac9e-9a1b5ced2172 nodeName:}" failed. No retries permitted until 2025-09-29 12:57:41.619772808 +0000 UTC m=+1048.511292414 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift") pod "swift-storage-0" (UID: "458f3780-8709-4a3c-ac9e-9a1b5ced2172") : configmap "swift-ring-files" not found
Sep 29 12:57:37 crc kubenswrapper[4611]: I0929 12:57:37.745698 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ef2ccbb-09cb-4074-858c-2ea164c83fa9" path="/var/lib/kubelet/pods/9ef2ccbb-09cb-4074-858c-2ea164c83fa9/volumes"
Sep 29 12:57:40 crc kubenswrapper[4611]: I0929 12:57:40.942228 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Sep 29 12:57:40 crc kubenswrapper[4611]: I0929 12:57:40.942577 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Sep 29 12:57:41 crc kubenswrapper[4611]: I0929 12:57:41.077774 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Sep 29 12:57:41 crc kubenswrapper[4611]: I0929 12:57:41.687417 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0"
Sep 29 12:57:41 crc kubenswrapper[4611]: E0929 12:57:41.687660 4611 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 12:57:41 crc kubenswrapper[4611]: E0929 12:57:41.688088 4611 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 12:57:41 crc kubenswrapper[4611]: E0929 12:57:41.688151 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift podName:458f3780-8709-4a3c-ac9e-9a1b5ced2172 nodeName:}" failed. No retries permitted until 2025-09-29 12:57:49.688129173 +0000 UTC m=+1056.579648779 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift") pod "swift-storage-0" (UID: "458f3780-8709-4a3c-ac9e-9a1b5ced2172") : configmap "swift-ring-files" not found
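The failed etc-swift mount is retried on a doubling schedule: durationBeforeRetry goes 500ms, 1s, 2s, 4s, and now 8s across the entries above. A sketch of that cadence follows; the initial delay and the doubling are read directly off the log, while the cap below is an assumed illustrative value (kubelet's nestedpendingoperations does bound the delay, but the ceiling is not visible in this excerpt).

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const maxBackoff = 2 * time.Minute  // assumed cap, not taken from the log
    	backoff := 500 * time.Millisecond   // first "durationBeforeRetry 500ms"
    	for attempt := 1; attempt <= 5; attempt++ {
    		fmt.Printf("attempt %d failed; no retries permitted for %v\n", attempt, backoff)
    		backoff *= 2 // 500ms -> 1s -> 2s -> 4s -> 8s, as in the entries above
    		if backoff > maxBackoff {
    			backoff = maxBackoff
    		}
    	}
    }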
Sep 29 12:57:41 crc kubenswrapper[4611]: I0929 12:57:41.845099 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Sep 29 12:57:42 crc kubenswrapper[4611]: I0929 12:57:42.613471 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a8501653-441a-4c5f-b098-bc5fb7aeba22","Type":"ContainerStarted","Data":"3dfee8d7bb12af3564c72d9234b8e24eae27c1ab6e23a547e02cb38b0272c1c3"}
Sep 29 12:57:42 crc kubenswrapper[4611]: I0929 12:57:42.616603 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"367a7739-cd0c-4a45-b804-1d763d6a55f4","Type":"ContainerStarted","Data":"2662cef5f539d027a6f728f6f4e19bd5d68db54b2951bca595c3a84f18c2c012"}
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.029787 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bff7899c9-b58wp"
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.624204 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt" event={"ID":"b42caf5f-0509-41a3-ab3c-49b5b2be817e","Type":"ContainerStarted","Data":"7836c37f355e3bec63733dc633d31fd7e318b8db59bed854e4daf5013a1d6fcf"}
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.624846 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-9jsdt"
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.626602 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z9w9b" event={"ID":"b548cb20-950c-4d83-b7e1-c910375a4bf0","Type":"ContainerStarted","Data":"80bf729448e223973e4f33e9e13d24abe79c46fbc795893b975d24fcf50a3163"}
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.631267 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9d360e26-9efd-4619-a0fc-77ac5eada7d0","Type":"ContainerStarted","Data":"c3c80abce550ac966f5bedc3e7a66b0a05fcacc2b883a854c72a16525f179995"}
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.633056 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qrlpz" event={"ID":"47b31f8c-66df-4172-8185-abba6357fc20","Type":"ContainerStarted","Data":"d10ccc613801f28ae0bfb4d7f0e7c55b7e9e721e475e7dc4170a874775fffbb2"}
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.653016 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-9jsdt" podStartSLOduration=7.664389909 podStartE2EDuration="49.652997806s" podCreationTimestamp="2025-09-29 12:56:54 +0000 UTC" firstStartedPulling="2025-09-29 12:57:00.531944781 +0000 UTC m=+1007.423464377" lastFinishedPulling="2025-09-29 12:57:42.520552668 +0000 UTC m=+1049.412072274" observedRunningTime="2025-09-29 12:57:43.650996848 +0000 UTC m=+1050.542516454" watchObservedRunningTime="2025-09-29 12:57:43.652997806 +0000 UTC m=+1050.544517412"
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.691110 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=8.197291247 podStartE2EDuration="49.691085154s" podCreationTimestamp="2025-09-29 12:56:54 +0000 UTC" firstStartedPulling="2025-09-29 12:57:01.243695617 +0000 UTC m=+1008.135215223" lastFinishedPulling="2025-09-29 12:57:42.737489524 +0000 UTC m=+1049.629009130" observedRunningTime="2025-09-29 12:57:43.689860809 +0000 UTC m=+1050.581380435" watchObservedRunningTime="2025-09-29 12:57:43.691085154 +0000 UTC m=+1050.582604760"
Sep 29 12:57:43 crc kubenswrapper[4611]: I0929 12:57:43.709298 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-z9w9b" podStartSLOduration=2.30288417 podStartE2EDuration="9.709280419s" podCreationTimestamp="2025-09-29 12:57:34 +0000 UTC" firstStartedPulling="2025-09-29 12:57:35.109834984 +0000 UTC m=+1042.001354600" lastFinishedPulling="2025-09-29 12:57:42.516231243 +0000 UTC m=+1049.407750849" observedRunningTime="2025-09-29 12:57:43.706298093 +0000 UTC m=+1050.597817709" watchObservedRunningTime="2025-09-29 12:57:43.709280419 +0000 UTC m=+1050.600800025"
Sep 29 12:57:44 crc kubenswrapper[4611]: I0929 12:57:44.159138 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Sep 29 12:57:44 crc kubenswrapper[4611]: I0929 12:57:44.641058 4611 generic.go:334] "Generic (PLEG): container finished" podID="47b31f8c-66df-4172-8185-abba6357fc20" containerID="d10ccc613801f28ae0bfb4d7f0e7c55b7e9e721e475e7dc4170a874775fffbb2" exitCode=0
Sep 29 12:57:44 crc kubenswrapper[4611]: I0929 12:57:44.642243 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qrlpz" event={"ID":"47b31f8c-66df-4172-8185-abba6357fc20","Type":"ContainerDied","Data":"d10ccc613801f28ae0bfb4d7f0e7c55b7e9e721e475e7dc4170a874775fffbb2"}
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.652240 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qrlpz" event={"ID":"47b31f8c-66df-4172-8185-abba6357fc20","Type":"ContainerStarted","Data":"9cf07bd466a4ae53ba7c15b72e0b8f7ab582774ad4f75ad0ced3fd5207f75daa"}
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.652659 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qrlpz" event={"ID":"47b31f8c-66df-4172-8185-abba6357fc20","Type":"ContainerStarted","Data":"59abe36e0fee8ff62ac2a5bc129f04d7109eaf93560862d658e7864f1c2e6036"}
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.653913 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qrlpz"
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.653951 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qrlpz"
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.678786 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-qrlpz" podStartSLOduration=6.679734763 podStartE2EDuration="51.678767036s" podCreationTimestamp="2025-09-29 12:56:54 +0000 UTC" firstStartedPulling="2025-09-29 12:56:57.598369541 +0000 UTC m=+1004.489889157" lastFinishedPulling="2025-09-29 12:57:42.597401834 +0000 UTC m=+1049.488921430" observedRunningTime="2025-09-29 12:57:45.675180652 +0000 UTC m=+1052.566700258" watchObservedRunningTime="2025-09-29 12:57:45.678767036 +0000 UTC m=+1052.570286642"
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.696610 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d967846b5-rssfj"
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.765518 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bff7899c9-b58wp"]
Sep 29 12:57:45 crc kubenswrapper[4611]: I0929 12:57:45.766107 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerName="dnsmasq-dns" containerID="cri-o://d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4" gracePeriod=10
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.159497 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.314372 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bff7899c9-b58wp"
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.414476 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-dns-svc\") pod \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") "
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.414564 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-config\") pod \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") "
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.414604 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjtkt\" (UniqueName: \"kubernetes.io/projected/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-kube-api-access-bjtkt\") pod \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\" (UID: \"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1\") "
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.420756 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-kube-api-access-bjtkt" (OuterVolumeSpecName: "kube-api-access-bjtkt") pod "e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" (UID: "e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1"). InnerVolumeSpecName "kube-api-access-bjtkt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.468893 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" (UID: "e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.488400 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-config" (OuterVolumeSpecName: "config") pod "e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" (UID: "e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.516663 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.516699 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.516714 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjtkt\" (UniqueName: \"kubernetes.io/projected/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1-kube-api-access-bjtkt\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.662719 4611 generic.go:334] "Generic (PLEG): container finished" podID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerID="d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4" exitCode=0 Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.663634 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.663757 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" event={"ID":"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1","Type":"ContainerDied","Data":"d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4"} Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.663791 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bff7899c9-b58wp" event={"ID":"e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1","Type":"ContainerDied","Data":"7c3835e646b84159d369f598d56cb68847a44b82ae22783fc8ab727344ce3cd1"} Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.663807 4611 scope.go:117] "RemoveContainer" containerID="d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.688595 4611 scope.go:117] "RemoveContainer" containerID="c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.699954 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bff7899c9-b58wp"] Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.708372 4611 scope.go:117] "RemoveContainer" containerID="d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.708508 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bff7899c9-b58wp"] Sep 29 12:57:46 crc kubenswrapper[4611]: E0929 12:57:46.709271 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4\": container with ID starting with d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4 not found: ID does not exist" containerID="d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.709301 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4"} err="failed to get container status 
\"d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4\": rpc error: code = NotFound desc = could not find container \"d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4\": container with ID starting with d4f26bdc40ae3992b3789b8c58f7799fa40e4e0b72136715c8ae91de8a71ebd4 not found: ID does not exist" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.709322 4611 scope.go:117] "RemoveContainer" containerID="c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370" Sep 29 12:57:46 crc kubenswrapper[4611]: E0929 12:57:46.709860 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370\": container with ID starting with c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370 not found: ID does not exist" containerID="c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370" Sep 29 12:57:46 crc kubenswrapper[4611]: I0929 12:57:46.709888 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370"} err="failed to get container status \"c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370\": rpc error: code = NotFound desc = could not find container \"c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370\": container with ID starting with c220c8cefc756b8fabc73d82f703a921067b003637f6b8674d73f9d87ad44370 not found: ID does not exist" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.217203 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.275926 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.573568 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69fd5f8c87-pkvjk"] Sep 29 12:57:47 crc kubenswrapper[4611]: E0929 12:57:47.574953 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerName="init" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.575077 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerName="init" Sep 29 12:57:47 crc kubenswrapper[4611]: E0929 12:57:47.575172 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerName="dnsmasq-dns" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.575284 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerName="dnsmasq-dns" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.575583 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" containerName="dnsmasq-dns" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.577001 4611 util.go:30] "No sandbox for pod can be found. 
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.582650 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.605217 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69fd5f8c87-pkvjk"]
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.635937 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-config\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.636249 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-nb\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.636357 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-sb\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.636469 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcrnm\" (UniqueName: \"kubernetes.io/projected/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-kube-api-access-kcrnm\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.636587 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-dns-svc\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.664566 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-6dcc6c48fd-xwrxv"]
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.667198 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.672044 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-vq4lw"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.677419 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.677419 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.700032 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-6dcc6c48fd-xwrxv"]
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.738033 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-dns-svc\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.738166 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-config\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.738217 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-nb\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.738240 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-sb\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.738265 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcrnm\" (UniqueName: \"kubernetes.io/projected/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-kube-api-access-kcrnm\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.739673 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-dns-svc\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.740339 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-sb\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.740782 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-nb\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.741504 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-config\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.752863 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1" path="/var/lib/kubelet/pods/e08f11c1-a5b8-4a7f-a2d6-ca6988d244d1/volumes"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.757478 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcrnm\" (UniqueName: \"kubernetes.io/projected/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-kube-api-access-kcrnm\") pod \"dnsmasq-dns-69fd5f8c87-pkvjk\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.839807 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7prmm\" (UniqueName: \"kubernetes.io/projected/3a3e35e4-6b43-415c-871d-ab6903b9d24a-kube-api-access-7prmm\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.840011 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3a3e35e4-6b43-415c-871d-ab6903b9d24a-scripts\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.840142 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3e35e4-6b43-415c-871d-ab6903b9d24a-combined-ca-bundle\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.840203 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a3e35e4-6b43-415c-871d-ab6903b9d24a-ovn-northd-tls-certs\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv"
Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.903792 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.941483 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3a3e35e4-6b43-415c-871d-ab6903b9d24a-scripts\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.941850 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3e35e4-6b43-415c-871d-ab6903b9d24a-combined-ca-bundle\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.941885 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a3e35e4-6b43-415c-871d-ab6903b9d24a-ovn-northd-tls-certs\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.941931 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7prmm\" (UniqueName: \"kubernetes.io/projected/3a3e35e4-6b43-415c-871d-ab6903b9d24a-kube-api-access-7prmm\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.943357 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3a3e35e4-6b43-415c-871d-ab6903b9d24a-scripts\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.949131 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3e35e4-6b43-415c-871d-ab6903b9d24a-combined-ca-bundle\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.952238 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a3e35e4-6b43-415c-871d-ab6903b9d24a-ovn-northd-tls-certs\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:47 crc kubenswrapper[4611]: I0929 12:57:47.981411 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7prmm\" (UniqueName: \"kubernetes.io/projected/3a3e35e4-6b43-415c-871d-ab6903b9d24a-kube-api-access-7prmm\") pod \"ovn-northd-6dcc6c48fd-xwrxv\" (UID: \"3a3e35e4-6b43-415c-871d-ab6903b9d24a\") " pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:48 crc kubenswrapper[4611]: I0929 12:57:48.000999 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:48 crc kubenswrapper[4611]: I0929 12:57:48.629269 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69fd5f8c87-pkvjk"] Sep 29 12:57:48 crc kubenswrapper[4611]: I0929 12:57:48.685660 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-6dcc6c48fd-xwrxv"] Sep 29 12:57:48 crc kubenswrapper[4611]: I0929 12:57:48.690351 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" event={"ID":"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10","Type":"ContainerStarted","Data":"8c4dcb102197ceb25fb3caf666fb95236e3020bbf5f817252c1b5815aa4a612b"} Sep 29 12:57:48 crc kubenswrapper[4611]: W0929 12:57:48.699856 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a3e35e4_6b43_415c_871d_ab6903b9d24a.slice/crio-b270c76deb6ad5b3634b4ca272366401e8705c7a8c7c5b5ba924d9f60e4aae03 WatchSource:0}: Error finding container b270c76deb6ad5b3634b4ca272366401e8705c7a8c7c5b5ba924d9f60e4aae03: Status 404 returned error can't find the container with id b270c76deb6ad5b3634b4ca272366401e8705c7a8c7c5b5ba924d9f60e4aae03 Sep 29 12:57:49 crc kubenswrapper[4611]: I0929 12:57:49.700960 4611 generic.go:334] "Generic (PLEG): container finished" podID="367a7739-cd0c-4a45-b804-1d763d6a55f4" containerID="2662cef5f539d027a6f728f6f4e19bd5d68db54b2951bca595c3a84f18c2c012" exitCode=0 Sep 29 12:57:49 crc kubenswrapper[4611]: I0929 12:57:49.701050 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"367a7739-cd0c-4a45-b804-1d763d6a55f4","Type":"ContainerDied","Data":"2662cef5f539d027a6f728f6f4e19bd5d68db54b2951bca595c3a84f18c2c012"} Sep 29 12:57:49 crc kubenswrapper[4611]: I0929 12:57:49.702810 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" event={"ID":"3a3e35e4-6b43-415c-871d-ab6903b9d24a","Type":"ContainerStarted","Data":"b270c76deb6ad5b3634b4ca272366401e8705c7a8c7c5b5ba924d9f60e4aae03"} Sep 29 12:57:49 crc kubenswrapper[4611]: I0929 12:57:49.707323 4611 generic.go:334] "Generic (PLEG): container finished" podID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" containerID="685464c25d9581d4db8c7f3e8be1632d730716638233b5e61033e00eb32d2661" exitCode=0 Sep 29 12:57:49 crc kubenswrapper[4611]: I0929 12:57:49.707358 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" event={"ID":"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10","Type":"ContainerDied","Data":"685464c25d9581d4db8c7f3e8be1632d730716638233b5e61033e00eb32d2661"} Sep 29 12:57:49 crc kubenswrapper[4611]: I0929 12:57:49.791746 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:57:49 crc kubenswrapper[4611]: E0929 12:57:49.792493 4611 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 12:57:49 crc kubenswrapper[4611]: E0929 12:57:49.792513 4611 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 12:57:49 crc kubenswrapper[4611]: E0929 12:57:49.792565 4611 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift podName:458f3780-8709-4a3c-ac9e-9a1b5ced2172 nodeName:}" failed. No retries permitted until 2025-09-29 12:58:05.792544451 +0000 UTC m=+1072.684064057 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift") pod "swift-storage-0" (UID: "458f3780-8709-4a3c-ac9e-9a1b5ced2172") : configmap "swift-ring-files" not found Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.717222 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" event={"ID":"3a3e35e4-6b43-415c-871d-ab6903b9d24a","Type":"ContainerStarted","Data":"e0fe037f8609df51a3c61087cb5a8765fb25d55c7dabbfd7777407bb2377691a"} Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.717608 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.720067 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" event={"ID":"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10","Type":"ContainerStarted","Data":"2fcd8e048f1f7869c8186ec4cdd8be6001eb3d8ebb88d7e8629e89ae8bbe8a27"} Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.720487 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.722277 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"367a7739-cd0c-4a45-b804-1d763d6a55f4","Type":"ContainerStarted","Data":"c9dbee0004a9145abafedc288d72c3a9019cc0616fcd1c0a266dc21be6600dcb"} Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.736405 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" podStartSLOduration=2.917579458 podStartE2EDuration="3.73639025s" podCreationTimestamp="2025-09-29 12:57:47 +0000 UTC" firstStartedPulling="2025-09-29 12:57:48.701201899 +0000 UTC m=+1055.592721505" lastFinishedPulling="2025-09-29 12:57:49.520012691 +0000 UTC m=+1056.411532297" observedRunningTime="2025-09-29 12:57:50.736000349 +0000 UTC m=+1057.627519955" watchObservedRunningTime="2025-09-29 12:57:50.73639025 +0000 UTC m=+1057.627909856" Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.761350 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371975.093445 podStartE2EDuration="1m1.761330159s" podCreationTimestamp="2025-09-29 12:56:49 +0000 UTC" firstStartedPulling="2025-09-29 12:56:51.592769569 +0000 UTC m=+998.484289175" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:57:50.755270875 +0000 UTC m=+1057.646790481" watchObservedRunningTime="2025-09-29 12:57:50.761330159 +0000 UTC m=+1057.652849765" Sep 29 12:57:50 crc kubenswrapper[4611]: I0929 12:57:50.792274 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" podStartSLOduration=3.792253921 podStartE2EDuration="3.792253921s" podCreationTimestamp="2025-09-29 12:57:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:57:50.790382477 +0000 UTC m=+1057.681902093" watchObservedRunningTime="2025-09-29 
12:57:50.792253921 +0000 UTC m=+1057.683773527" Sep 29 12:57:54 crc kubenswrapper[4611]: I0929 12:57:54.753803 4611 generic.go:334] "Generic (PLEG): container finished" podID="b548cb20-950c-4d83-b7e1-c910375a4bf0" containerID="80bf729448e223973e4f33e9e13d24abe79c46fbc795893b975d24fcf50a3163" exitCode=0 Sep 29 12:57:54 crc kubenswrapper[4611]: I0929 12:57:54.753891 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z9w9b" event={"ID":"b548cb20-950c-4d83-b7e1-c910375a4bf0","Type":"ContainerDied","Data":"80bf729448e223973e4f33e9e13d24abe79c46fbc795893b975d24fcf50a3163"} Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.075582 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211091 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-ring-data-devices\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211144 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-swiftconf\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211180 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-scripts\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211216 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-dispersionconf\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211370 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv7qh\" (UniqueName: \"kubernetes.io/projected/b548cb20-950c-4d83-b7e1-c910375a4bf0-kube-api-access-cv7qh\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211415 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b548cb20-950c-4d83-b7e1-c910375a4bf0-etc-swift\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.211435 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-combined-ca-bundle\") pod \"b548cb20-950c-4d83-b7e1-c910375a4bf0\" (UID: \"b548cb20-950c-4d83-b7e1-c910375a4bf0\") " Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.213010 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b548cb20-950c-4d83-b7e1-c910375a4bf0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod 
"b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.213227 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.223876 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.226194 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b548cb20-950c-4d83-b7e1-c910375a4bf0-kube-api-access-cv7qh" (OuterVolumeSpecName: "kube-api-access-cv7qh") pod "b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "kube-api-access-cv7qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.236975 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-scripts" (OuterVolumeSpecName: "scripts") pod "b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.239070 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.240750 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b548cb20-950c-4d83-b7e1-c910375a4bf0" (UID: "b548cb20-950c-4d83-b7e1-c910375a4bf0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313262 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv7qh\" (UniqueName: \"kubernetes.io/projected/b548cb20-950c-4d83-b7e1-c910375a4bf0-kube-api-access-cv7qh\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313297 4611 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b548cb20-950c-4d83-b7e1-c910375a4bf0-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313310 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313325 4611 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313337 4611 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313348 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b548cb20-950c-4d83-b7e1-c910375a4bf0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.313359 4611 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b548cb20-950c-4d83-b7e1-c910375a4bf0-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.770236 4611 generic.go:334] "Generic (PLEG): container finished" podID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerID="be1f39ea68722bb92f5a313b14311073c5463b6ee64113518c5a704781fb9c26" exitCode=0 Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.770344 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"eb4ad743-3387-43bc-b15d-e3d4b0825793","Type":"ContainerDied","Data":"be1f39ea68722bb92f5a313b14311073c5463b6ee64113518c5a704781fb9c26"} Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.772590 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z9w9b" event={"ID":"b548cb20-950c-4d83-b7e1-c910375a4bf0","Type":"ContainerDied","Data":"1b0eb9505080b26c39cc31b3aa5193eed9261a99c560041739dba5f9e6cf5d70"} Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.772642 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b0eb9505080b26c39cc31b3aa5193eed9261a99c560041739dba5f9e6cf5d70" Sep 29 12:57:56 crc kubenswrapper[4611]: I0929 12:57:56.772705 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-z9w9b" Sep 29 12:57:57 crc kubenswrapper[4611]: I0929 12:57:57.782757 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"eb4ad743-3387-43bc-b15d-e3d4b0825793","Type":"ContainerStarted","Data":"626c6b63a0bf75740e17a5a7ea410d7433099baf1d9cee485485a254e2cac77c"} Sep 29 12:57:57 crc kubenswrapper[4611]: I0929 12:57:57.783951 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 12:57:57 crc kubenswrapper[4611]: I0929 12:57:57.819169 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.821391777 podStartE2EDuration="1m11.819150046s" podCreationTimestamp="2025-09-29 12:56:46 +0000 UTC" firstStartedPulling="2025-09-29 12:56:48.722591527 +0000 UTC m=+995.614111123" lastFinishedPulling="2025-09-29 12:57:25.720349786 +0000 UTC m=+1032.611869392" observedRunningTime="2025-09-29 12:57:57.817510019 +0000 UTC m=+1064.709029625" watchObservedRunningTime="2025-09-29 12:57:57.819150046 +0000 UTC m=+1064.710669652" Sep 29 12:57:57 crc kubenswrapper[4611]: I0929 12:57:57.904819 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" Sep 29 12:57:57 crc kubenswrapper[4611]: I0929 12:57:57.993958 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d967846b5-rssfj"] Sep 29 12:57:57 crc kubenswrapper[4611]: I0929 12:57:57.994193 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d967846b5-rssfj" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerName="dnsmasq-dns" containerID="cri-o://5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a" gracePeriod=10 Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.648007 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.749280 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-ovsdbserver-sb\") pod \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.750283 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-config\") pod \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.750307 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-dns-svc\") pod \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.750340 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2xb7\" (UniqueName: \"kubernetes.io/projected/58db2db0-e626-4c37-8b9d-b27d39e47e8a-kube-api-access-x2xb7\") pod \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\" (UID: \"58db2db0-e626-4c37-8b9d-b27d39e47e8a\") " Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.768859 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58db2db0-e626-4c37-8b9d-b27d39e47e8a-kube-api-access-x2xb7" (OuterVolumeSpecName: "kube-api-access-x2xb7") pod "58db2db0-e626-4c37-8b9d-b27d39e47e8a" (UID: "58db2db0-e626-4c37-8b9d-b27d39e47e8a"). InnerVolumeSpecName "kube-api-access-x2xb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.838400 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-config" (OuterVolumeSpecName: "config") pod "58db2db0-e626-4c37-8b9d-b27d39e47e8a" (UID: "58db2db0-e626-4c37-8b9d-b27d39e47e8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.852606 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.852648 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2xb7\" (UniqueName: \"kubernetes.io/projected/58db2db0-e626-4c37-8b9d-b27d39e47e8a-kube-api-access-x2xb7\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.856084 4611 generic.go:334] "Generic (PLEG): container finished" podID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerID="5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a" exitCode=0 Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.856422 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d967846b5-rssfj" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.857008 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d967846b5-rssfj" event={"ID":"58db2db0-e626-4c37-8b9d-b27d39e47e8a","Type":"ContainerDied","Data":"5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a"} Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.857042 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d967846b5-rssfj" event={"ID":"58db2db0-e626-4c37-8b9d-b27d39e47e8a","Type":"ContainerDied","Data":"48b7df1b06025cbfea77af90b2235b0a9ba12873223a4584d312adee50a61cea"} Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.857061 4611 scope.go:117] "RemoveContainer" containerID="5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.864250 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "58db2db0-e626-4c37-8b9d-b27d39e47e8a" (UID: "58db2db0-e626-4c37-8b9d-b27d39e47e8a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.892036 4611 scope.go:117] "RemoveContainer" containerID="28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.907769 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "58db2db0-e626-4c37-8b9d-b27d39e47e8a" (UID: "58db2db0-e626-4c37-8b9d-b27d39e47e8a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.957345 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.957385 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58db2db0-e626-4c37-8b9d-b27d39e47e8a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.974418 4611 scope.go:117] "RemoveContainer" containerID="5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a" Sep 29 12:57:58 crc kubenswrapper[4611]: E0929 12:57:58.978589 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a\": container with ID starting with 5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a not found: ID does not exist" containerID="5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.978648 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a"} err="failed to get container status \"5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a\": rpc error: code = NotFound desc = could not find container \"5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a\": container with ID starting with 5981319c8abc0fc51c2cb2204f1e8e05cc26fab83502b58b902a588d96aef67a not found: ID does not exist" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.978678 4611 scope.go:117] "RemoveContainer" containerID="28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130" Sep 29 12:57:58 crc kubenswrapper[4611]: E0929 12:57:58.986110 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130\": container with ID starting with 28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130 not found: ID does not exist" containerID="28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130" Sep 29 12:57:58 crc kubenswrapper[4611]: I0929 12:57:58.986162 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130"} err="failed to get container status \"28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130\": rpc error: code = NotFound desc = could not find container \"28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130\": container with ID starting with 28a35fdd312ec7ae5c77e434fa795eafb21b07cecc8bf16666aca347e7d46130 not found: ID does not exist" Sep 29 12:57:59 crc kubenswrapper[4611]: I0929 12:57:59.185861 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d967846b5-rssfj"] Sep 29 12:57:59 crc kubenswrapper[4611]: I0929 12:57:59.198382 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d967846b5-rssfj"] Sep 29 12:57:59 crc kubenswrapper[4611]: I0929 12:57:59.746139 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" 
path="/var/lib/kubelet/pods/58db2db0-e626-4c37-8b9d-b27d39e47e8a/volumes" Sep 29 12:58:00 crc kubenswrapper[4611]: I0929 12:58:00.503093 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 12:58:00 crc kubenswrapper[4611]: I0929 12:58:00.503153 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 12:58:00 crc kubenswrapper[4611]: I0929 12:58:00.564917 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 12:58:00 crc kubenswrapper[4611]: I0929 12:58:00.971325 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.400979 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-q64bh"] Sep 29 12:58:01 crc kubenswrapper[4611]: E0929 12:58:01.401323 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerName="init" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.401340 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerName="init" Sep 29 12:58:01 crc kubenswrapper[4611]: E0929 12:58:01.401363 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerName="dnsmasq-dns" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.401370 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerName="dnsmasq-dns" Sep 29 12:58:01 crc kubenswrapper[4611]: E0929 12:58:01.401381 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b548cb20-950c-4d83-b7e1-c910375a4bf0" containerName="swift-ring-rebalance" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.401388 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b548cb20-950c-4d83-b7e1-c910375a4bf0" containerName="swift-ring-rebalance" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.401526 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="58db2db0-e626-4c37-8b9d-b27d39e47e8a" containerName="dnsmasq-dns" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.401537 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b548cb20-950c-4d83-b7e1-c910375a4bf0" containerName="swift-ring-rebalance" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.402076 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-q64bh" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.416598 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-q64bh"] Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.496473 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrs5j\" (UniqueName: \"kubernetes.io/projected/9e5e64a3-589f-4f55-a0fa-491c0ac42120-kube-api-access-mrs5j\") pod \"placement-db-create-q64bh\" (UID: \"9e5e64a3-589f-4f55-a0fa-491c0ac42120\") " pod="openstack/placement-db-create-q64bh" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.598910 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrs5j\" (UniqueName: \"kubernetes.io/projected/9e5e64a3-589f-4f55-a0fa-491c0ac42120-kube-api-access-mrs5j\") pod \"placement-db-create-q64bh\" (UID: \"9e5e64a3-589f-4f55-a0fa-491c0ac42120\") " pod="openstack/placement-db-create-q64bh" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.623451 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrs5j\" (UniqueName: \"kubernetes.io/projected/9e5e64a3-589f-4f55-a0fa-491c0ac42120-kube-api-access-mrs5j\") pod \"placement-db-create-q64bh\" (UID: \"9e5e64a3-589f-4f55-a0fa-491c0ac42120\") " pod="openstack/placement-db-create-q64bh" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.720722 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q64bh" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.925051 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-s2w2b"] Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.926931 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:01 crc kubenswrapper[4611]: I0929 12:58:01.952351 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-s2w2b"] Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.004718 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwffk\" (UniqueName: \"kubernetes.io/projected/b11bc76a-123b-4356-9976-52ff27cea6f2-kube-api-access-kwffk\") pod \"glance-db-create-s2w2b\" (UID: \"b11bc76a-123b-4356-9976-52ff27cea6f2\") " pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.107014 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwffk\" (UniqueName: \"kubernetes.io/projected/b11bc76a-123b-4356-9976-52ff27cea6f2-kube-api-access-kwffk\") pod \"glance-db-create-s2w2b\" (UID: \"b11bc76a-123b-4356-9976-52ff27cea6f2\") " pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.128273 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwffk\" (UniqueName: \"kubernetes.io/projected/b11bc76a-123b-4356-9976-52ff27cea6f2-kube-api-access-kwffk\") pod \"glance-db-create-s2w2b\" (UID: \"b11bc76a-123b-4356-9976-52ff27cea6f2\") " pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.245403 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-q64bh"] Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.269064 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.894879 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q64bh" event={"ID":"9e5e64a3-589f-4f55-a0fa-491c0ac42120","Type":"ContainerStarted","Data":"87d09ebea5c24fe79c80f71eadd4d3db8897feb83ae141ebb36d8bdf7b466229"} Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.895371 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q64bh" event={"ID":"9e5e64a3-589f-4f55-a0fa-491c0ac42120","Type":"ContainerStarted","Data":"b3cca098ea6b3fdfbc2b276333631e1e4a8de30a5ef02a3f801402491ec3571d"} Sep 29 12:58:02 crc kubenswrapper[4611]: I0929 12:58:02.918602 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-q64bh" podStartSLOduration=1.9185773959999999 podStartE2EDuration="1.918577396s" podCreationTimestamp="2025-09-29 12:58:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:02.913233462 +0000 UTC m=+1069.804753068" watchObservedRunningTime="2025-09-29 12:58:02.918577396 +0000 UTC m=+1069.810097002" Sep 29 12:58:03 crc kubenswrapper[4611]: I0929 12:58:03.019712 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-s2w2b"] Sep 29 12:58:03 crc kubenswrapper[4611]: I0929 12:58:03.084577 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-6dcc6c48fd-xwrxv" Sep 29 12:58:03 crc kubenswrapper[4611]: I0929 12:58:03.903599 4611 generic.go:334] "Generic (PLEG): container finished" podID="9e5e64a3-589f-4f55-a0fa-491c0ac42120" containerID="87d09ebea5c24fe79c80f71eadd4d3db8897feb83ae141ebb36d8bdf7b466229" exitCode=0 Sep 29 12:58:03 
crc kubenswrapper[4611]: I0929 12:58:03.903680 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q64bh" event={"ID":"9e5e64a3-589f-4f55-a0fa-491c0ac42120","Type":"ContainerDied","Data":"87d09ebea5c24fe79c80f71eadd4d3db8897feb83ae141ebb36d8bdf7b466229"} Sep 29 12:58:03 crc kubenswrapper[4611]: I0929 12:58:03.906429 4611 generic.go:334] "Generic (PLEG): container finished" podID="b11bc76a-123b-4356-9976-52ff27cea6f2" containerID="80260896e036eda75e10f57be967de18b9407d131cbf352e19d2e5f822f335d3" exitCode=0 Sep 29 12:58:03 crc kubenswrapper[4611]: I0929 12:58:03.906491 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2w2b" event={"ID":"b11bc76a-123b-4356-9976-52ff27cea6f2","Type":"ContainerDied","Data":"80260896e036eda75e10f57be967de18b9407d131cbf352e19d2e5f822f335d3"} Sep 29 12:58:03 crc kubenswrapper[4611]: I0929 12:58:03.906523 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2w2b" event={"ID":"b11bc76a-123b-4356-9976-52ff27cea6f2","Type":"ContainerStarted","Data":"3a6e68b8013d28c70e018d9f7e992d62324485bbd17b17900bdd35effe31462f"} Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.348987 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.358504 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q64bh" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.467690 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrs5j\" (UniqueName: \"kubernetes.io/projected/9e5e64a3-589f-4f55-a0fa-491c0ac42120-kube-api-access-mrs5j\") pod \"9e5e64a3-589f-4f55-a0fa-491c0ac42120\" (UID: \"9e5e64a3-589f-4f55-a0fa-491c0ac42120\") " Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.467836 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwffk\" (UniqueName: \"kubernetes.io/projected/b11bc76a-123b-4356-9976-52ff27cea6f2-kube-api-access-kwffk\") pod \"b11bc76a-123b-4356-9976-52ff27cea6f2\" (UID: \"b11bc76a-123b-4356-9976-52ff27cea6f2\") " Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.474920 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e5e64a3-589f-4f55-a0fa-491c0ac42120-kube-api-access-mrs5j" (OuterVolumeSpecName: "kube-api-access-mrs5j") pod "9e5e64a3-589f-4f55-a0fa-491c0ac42120" (UID: "9e5e64a3-589f-4f55-a0fa-491c0ac42120"). InnerVolumeSpecName "kube-api-access-mrs5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.475052 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11bc76a-123b-4356-9976-52ff27cea6f2-kube-api-access-kwffk" (OuterVolumeSpecName: "kube-api-access-kwffk") pod "b11bc76a-123b-4356-9976-52ff27cea6f2" (UID: "b11bc76a-123b-4356-9976-52ff27cea6f2"). InnerVolumeSpecName "kube-api-access-kwffk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.569972 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrs5j\" (UniqueName: \"kubernetes.io/projected/9e5e64a3-589f-4f55-a0fa-491c0ac42120-kube-api-access-mrs5j\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.570012 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwffk\" (UniqueName: \"kubernetes.io/projected/b11bc76a-123b-4356-9976-52ff27cea6f2-kube-api-access-kwffk\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.875585 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.880547 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/458f3780-8709-4a3c-ac9e-9a1b5ced2172-etc-swift\") pod \"swift-storage-0\" (UID: \"458f3780-8709-4a3c-ac9e-9a1b5ced2172\") " pod="openstack/swift-storage-0" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.923134 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q64bh" event={"ID":"9e5e64a3-589f-4f55-a0fa-491c0ac42120","Type":"ContainerDied","Data":"b3cca098ea6b3fdfbc2b276333631e1e4a8de30a5ef02a3f801402491ec3571d"} Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.923376 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3cca098ea6b3fdfbc2b276333631e1e4a8de30a5ef02a3f801402491ec3571d" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.924176 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2w2b" event={"ID":"b11bc76a-123b-4356-9976-52ff27cea6f2","Type":"ContainerDied","Data":"3a6e68b8013d28c70e018d9f7e992d62324485bbd17b17900bdd35effe31462f"} Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.924198 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a6e68b8013d28c70e018d9f7e992d62324485bbd17b17900bdd35effe31462f" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.924226 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2w2b" Sep 29 12:58:05 crc kubenswrapper[4611]: I0929 12:58:05.924238 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q64bh" Sep 29 12:58:06 crc kubenswrapper[4611]: I0929 12:58:06.139147 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 29 12:58:06 crc kubenswrapper[4611]: I0929 12:58:06.817371 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 12:58:06 crc kubenswrapper[4611]: W0929 12:58:06.823833 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod458f3780_8709_4a3c_ac9e_9a1b5ced2172.slice/crio-56dab6c4b47a175008ca82af0e58b234c03c44054c0c2fcedee7440387fc4195 WatchSource:0}: Error finding container 56dab6c4b47a175008ca82af0e58b234c03c44054c0c2fcedee7440387fc4195: Status 404 returned error can't find the container with id 56dab6c4b47a175008ca82af0e58b234c03c44054c0c2fcedee7440387fc4195 Sep 29 12:58:06 crc kubenswrapper[4611]: I0929 12:58:06.933498 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"56dab6c4b47a175008ca82af0e58b234c03c44054c0c2fcedee7440387fc4195"} Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.027868 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.384683 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mmxgc"] Sep 29 12:58:08 crc kubenswrapper[4611]: E0929 12:58:08.385436 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5e64a3-589f-4f55-a0fa-491c0ac42120" containerName="mariadb-database-create" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.385458 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5e64a3-589f-4f55-a0fa-491c0ac42120" containerName="mariadb-database-create" Sep 29 12:58:08 crc kubenswrapper[4611]: E0929 12:58:08.385502 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b11bc76a-123b-4356-9976-52ff27cea6f2" containerName="mariadb-database-create" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.385512 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b11bc76a-123b-4356-9976-52ff27cea6f2" containerName="mariadb-database-create" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.385734 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5e64a3-589f-4f55-a0fa-491c0ac42120" containerName="mariadb-database-create" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.385763 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b11bc76a-123b-4356-9976-52ff27cea6f2" containerName="mariadb-database-create" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.386425 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mmxgc" Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.418418 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mmxgc"] Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.516523 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-m4vwf"] Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.519399 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.523244 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmx2j\" (UniqueName: \"kubernetes.io/projected/6655f4be-50ff-4ca4-aec0-82d21d2cc552-kube-api-access-nmx2j\") pod \"cinder-db-create-mmxgc\" (UID: \"6655f4be-50ff-4ca4-aec0-82d21d2cc552\") " pod="openstack/cinder-db-create-mmxgc"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.559890 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-m4vwf"]
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.628255 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgc86\" (UniqueName: \"kubernetes.io/projected/ac707613-fa2b-4406-8789-2c0a072d49f5-kube-api-access-cgc86\") pod \"barbican-db-create-m4vwf\" (UID: \"ac707613-fa2b-4406-8789-2c0a072d49f5\") " pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.628373 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmx2j\" (UniqueName: \"kubernetes.io/projected/6655f4be-50ff-4ca4-aec0-82d21d2cc552-kube-api-access-nmx2j\") pod \"cinder-db-create-mmxgc\" (UID: \"6655f4be-50ff-4ca4-aec0-82d21d2cc552\") " pod="openstack/cinder-db-create-mmxgc"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.657980 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmx2j\" (UniqueName: \"kubernetes.io/projected/6655f4be-50ff-4ca4-aec0-82d21d2cc552-kube-api-access-nmx2j\") pod \"cinder-db-create-mmxgc\" (UID: \"6655f4be-50ff-4ca4-aec0-82d21d2cc552\") " pod="openstack/cinder-db-create-mmxgc"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.720449 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mmxgc"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.728311 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-6kfbn"]
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.729413 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.731414 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgc86\" (UniqueName: \"kubernetes.io/projected/ac707613-fa2b-4406-8789-2c0a072d49f5-kube-api-access-cgc86\") pod \"barbican-db-create-m4vwf\" (UID: \"ac707613-fa2b-4406-8789-2c0a072d49f5\") " pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.754078 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgc86\" (UniqueName: \"kubernetes.io/projected/ac707613-fa2b-4406-8789-2c0a072d49f5-kube-api-access-cgc86\") pod \"barbican-db-create-m4vwf\" (UID: \"ac707613-fa2b-4406-8789-2c0a072d49f5\") " pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.759116 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-6kfbn"]
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.832770 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwbnt\" (UniqueName: \"kubernetes.io/projected/717832b2-73e3-4b2c-8f82-26603268ee98-kube-api-access-mwbnt\") pod \"neutron-db-create-6kfbn\" (UID: \"717832b2-73e3-4b2c-8f82-26603268ee98\") " pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.843920 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.934361 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwbnt\" (UniqueName: \"kubernetes.io/projected/717832b2-73e3-4b2c-8f82-26603268ee98-kube-api-access-mwbnt\") pod \"neutron-db-create-6kfbn\" (UID: \"717832b2-73e3-4b2c-8f82-26603268ee98\") " pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.972432 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwbnt\" (UniqueName: \"kubernetes.io/projected/717832b2-73e3-4b2c-8f82-26603268ee98-kube-api-access-mwbnt\") pod \"neutron-db-create-6kfbn\" (UID: \"717832b2-73e3-4b2c-8f82-26603268ee98\") " pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.981347 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"f89dbfd59c6e09685ff99b2c6dc24804040f5732aebfa536884d67f6c8244e4c"}
Sep 29 12:58:08 crc kubenswrapper[4611]: I0929 12:58:08.981401 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"11c0390a6a1fbbf2c18f0af386133bc9901b17c9e0984692406b971d142837a9"}
Sep 29 12:58:09 crc kubenswrapper[4611]: I0929 12:58:09.128974 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:09 crc kubenswrapper[4611]: I0929 12:58:09.205792 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mmxgc"]
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.037357 4611 generic.go:334] "Generic (PLEG): container finished" podID="6655f4be-50ff-4ca4-aec0-82d21d2cc552" containerID="c4edfee9a5209657b91e78dee3c1cbf984b8e04a582b834b6ea216abbc6c1fa7" exitCode=0
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.037744 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mmxgc" event={"ID":"6655f4be-50ff-4ca4-aec0-82d21d2cc552","Type":"ContainerDied","Data":"c4edfee9a5209657b91e78dee3c1cbf984b8e04a582b834b6ea216abbc6c1fa7"}
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.037777 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mmxgc" event={"ID":"6655f4be-50ff-4ca4-aec0-82d21d2cc552","Type":"ContainerStarted","Data":"a53649217e3de3dd3da04af4e7c993f80aca25db942d3c25926d4f3e8a46c1fe"}
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.041381 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"5aa3ba6ae36b8efb9ef18d3ee193208a953e4be1b849ca3917e4d77301b8b4aa"}
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.041436 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"3fe73554727e3fce0a10497b71f21184724824e29416ac05b460ae35c6ba48d2"}
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.369594 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-6kfbn"]
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.388486 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-m4vwf"]
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.987330 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-hvpz4"]
Sep 29 12:58:10 crc kubenswrapper[4611]: I0929 12:58:10.988947 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:10.999835 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-hvpz4"]
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.054941 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6kfbn" event={"ID":"717832b2-73e3-4b2c-8f82-26603268ee98","Type":"ContainerStarted","Data":"09beadb43ee2e338d4bbe181cd0327b44d454676f7c8d8d9f5622c6bc4320d3b"}
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.055008 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6kfbn" event={"ID":"717832b2-73e3-4b2c-8f82-26603268ee98","Type":"ContainerStarted","Data":"32fbbf26f645b0e20859b9a3c459b320b98c99f4855c1ed7d4ff67b0e1470afc"}
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.057929 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m4vwf" event={"ID":"ac707613-fa2b-4406-8789-2c0a072d49f5","Type":"ContainerStarted","Data":"0a2190557c2a56b76c791c9a3333ce65ea6a1042b9be2639f835d99552ebbb01"}
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.057966 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m4vwf" event={"ID":"ac707613-fa2b-4406-8789-2c0a072d49f5","Type":"ContainerStarted","Data":"2b3819f3f4583bd226857a705d32b4b06499299633f1bf0fe549d56c8d33076f"}
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.083066 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-6kfbn" podStartSLOduration=3.083042026 podStartE2EDuration="3.083042026s" podCreationTimestamp="2025-09-29 12:58:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:11.075239341 +0000 UTC m=+1077.966758957" watchObservedRunningTime="2025-09-29 12:58:11.083042026 +0000 UTC m=+1077.974561632"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.100664 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-m4vwf" podStartSLOduration=3.100643604 podStartE2EDuration="3.100643604s" podCreationTimestamp="2025-09-29 12:58:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:11.09219712 +0000 UTC m=+1077.983716726" watchObservedRunningTime="2025-09-29 12:58:11.100643604 +0000 UTC m=+1077.992163200"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.100718 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkkbl\" (UniqueName: \"kubernetes.io/projected/1ce7630f-ba0f-426d-8440-7525c4555235-kube-api-access-tkkbl\") pod \"keystone-db-create-hvpz4\" (UID: \"1ce7630f-ba0f-426d-8440-7525c4555235\") " pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.205310 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkkbl\" (UniqueName: \"kubernetes.io/projected/1ce7630f-ba0f-426d-8440-7525c4555235-kube-api-access-tkkbl\") pod \"keystone-db-create-hvpz4\" (UID: \"1ce7630f-ba0f-426d-8440-7525c4555235\") " pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.240825 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkkbl\" (UniqueName: \"kubernetes.io/projected/1ce7630f-ba0f-426d-8440-7525c4555235-kube-api-access-tkkbl\") pod \"keystone-db-create-hvpz4\" (UID: \"1ce7630f-ba0f-426d-8440-7525c4555235\") " pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.304201 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.381200 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b110-account-create-v9jb8"]
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.383386 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.407485 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b110-account-create-v9jb8"]
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.417900 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.506855 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mmxgc"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.520475 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9gp2\" (UniqueName: \"kubernetes.io/projected/5c7901dd-cfe7-48a6-91c1-78fb168963cb-kube-api-access-s9gp2\") pod \"placement-b110-account-create-v9jb8\" (UID: \"5c7901dd-cfe7-48a6-91c1-78fb168963cb\") " pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.621479 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmx2j\" (UniqueName: \"kubernetes.io/projected/6655f4be-50ff-4ca4-aec0-82d21d2cc552-kube-api-access-nmx2j\") pod \"6655f4be-50ff-4ca4-aec0-82d21d2cc552\" (UID: \"6655f4be-50ff-4ca4-aec0-82d21d2cc552\") "
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.621850 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9gp2\" (UniqueName: \"kubernetes.io/projected/5c7901dd-cfe7-48a6-91c1-78fb168963cb-kube-api-access-s9gp2\") pod \"placement-b110-account-create-v9jb8\" (UID: \"5c7901dd-cfe7-48a6-91c1-78fb168963cb\") " pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.626392 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6655f4be-50ff-4ca4-aec0-82d21d2cc552-kube-api-access-nmx2j" (OuterVolumeSpecName: "kube-api-access-nmx2j") pod "6655f4be-50ff-4ca4-aec0-82d21d2cc552" (UID: "6655f4be-50ff-4ca4-aec0-82d21d2cc552"). InnerVolumeSpecName "kube-api-access-nmx2j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.649555 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9gp2\" (UniqueName: \"kubernetes.io/projected/5c7901dd-cfe7-48a6-91c1-78fb168963cb-kube-api-access-s9gp2\") pod \"placement-b110-account-create-v9jb8\" (UID: \"5c7901dd-cfe7-48a6-91c1-78fb168963cb\") " pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.723419 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmx2j\" (UniqueName: \"kubernetes.io/projected/6655f4be-50ff-4ca4-aec0-82d21d2cc552-kube-api-access-nmx2j\") on node \"crc\" DevicePath \"\""
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.793823 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:11 crc kubenswrapper[4611]: I0929 12:58:11.819640 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-hvpz4"]
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.069118 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mmxgc"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.069881 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mmxgc" event={"ID":"6655f4be-50ff-4ca4-aec0-82d21d2cc552","Type":"ContainerDied","Data":"a53649217e3de3dd3da04af4e7c993f80aca25db942d3c25926d4f3e8a46c1fe"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.069910 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a53649217e3de3dd3da04af4e7c993f80aca25db942d3c25926d4f3e8a46c1fe"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.080019 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"c269e682af6053eacaf81520281ea4ca697bd7b0a86cb14799eb2ef7c6f8489c"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.080058 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"e3c1813b1e81884f93c99e2c400db1c2d5960355133eef2d52daf29fb11b332f"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.080070 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"28199f5759ef01bf5930d2b5052cee973af874a07da77a1afeeb63770a7f786c"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.082314 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-hvpz4" event={"ID":"1ce7630f-ba0f-426d-8440-7525c4555235","Type":"ContainerStarted","Data":"25c96480d3b5cac705f71c44f0dd4504d2581633e079f7e628e87fe0d735d88a"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.083444 4611 generic.go:334] "Generic (PLEG): container finished" podID="ac707613-fa2b-4406-8789-2c0a072d49f5" containerID="0a2190557c2a56b76c791c9a3333ce65ea6a1042b9be2639f835d99552ebbb01" exitCode=0
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.083485 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m4vwf" event={"ID":"ac707613-fa2b-4406-8789-2c0a072d49f5","Type":"ContainerDied","Data":"0a2190557c2a56b76c791c9a3333ce65ea6a1042b9be2639f835d99552ebbb01"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.084118 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9b28-account-create-nt47n"]
Sep 29 12:58:12 crc kubenswrapper[4611]: E0929 12:58:12.084406 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6655f4be-50ff-4ca4-aec0-82d21d2cc552" containerName="mariadb-database-create"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.084420 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6655f4be-50ff-4ca4-aec0-82d21d2cc552" containerName="mariadb-database-create"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.084655 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6655f4be-50ff-4ca4-aec0-82d21d2cc552" containerName="mariadb-database-create"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.085194 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.087494 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.088742 4611 generic.go:334] "Generic (PLEG): container finished" podID="717832b2-73e3-4b2c-8f82-26603268ee98" containerID="09beadb43ee2e338d4bbe181cd0327b44d454676f7c8d8d9f5622c6bc4320d3b" exitCode=0
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.088789 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6kfbn" event={"ID":"717832b2-73e3-4b2c-8f82-26603268ee98","Type":"ContainerDied","Data":"09beadb43ee2e338d4bbe181cd0327b44d454676f7c8d8d9f5622c6bc4320d3b"}
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.101768 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9b28-account-create-nt47n"]
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.233911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnmdz\" (UniqueName: \"kubernetes.io/projected/42115bcd-dc69-491c-a920-96969813366c-kube-api-access-gnmdz\") pod \"glance-9b28-account-create-nt47n\" (UID: \"42115bcd-dc69-491c-a920-96969813366c\") " pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.280638 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b110-account-create-v9jb8"]
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.335136 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnmdz\" (UniqueName: \"kubernetes.io/projected/42115bcd-dc69-491c-a920-96969813366c-kube-api-access-gnmdz\") pod \"glance-9b28-account-create-nt47n\" (UID: \"42115bcd-dc69-491c-a920-96969813366c\") " pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:12 crc kubenswrapper[4611]: W0929 12:58:12.343942 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c7901dd_cfe7_48a6_91c1_78fb168963cb.slice/crio-c0394efe13f307b295102fc87bb8eb2a584ad845d911e3385b75ddd5bfbe61f2 WatchSource:0}: Error finding container c0394efe13f307b295102fc87bb8eb2a584ad845d911e3385b75ddd5bfbe61f2: Status 404 returned error can't find the container with id c0394efe13f307b295102fc87bb8eb2a584ad845d911e3385b75ddd5bfbe61f2
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.355545 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnmdz\" (UniqueName: \"kubernetes.io/projected/42115bcd-dc69-491c-a920-96969813366c-kube-api-access-gnmdz\") pod \"glance-9b28-account-create-nt47n\" (UID: \"42115bcd-dc69-491c-a920-96969813366c\") " pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.407142 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:12 crc kubenswrapper[4611]: I0929 12:58:12.888287 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9b28-account-create-nt47n"]
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.096875 4611 generic.go:334] "Generic (PLEG): container finished" podID="1ce7630f-ba0f-426d-8440-7525c4555235" containerID="f8e8875a9aaf4d32509b9630ed153f8bd32c254b7de03e3485b742b1d6f30469" exitCode=0
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.097364 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-hvpz4" event={"ID":"1ce7630f-ba0f-426d-8440-7525c4555235","Type":"ContainerDied","Data":"f8e8875a9aaf4d32509b9630ed153f8bd32c254b7de03e3485b742b1d6f30469"}
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.098435 4611 generic.go:334] "Generic (PLEG): container finished" podID="5c7901dd-cfe7-48a6-91c1-78fb168963cb" containerID="aacc762242ca7e1489e8960dd7b52c81c9882cbb597357c8ea66ac9bd4201bd7" exitCode=0
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.098532 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b110-account-create-v9jb8" event={"ID":"5c7901dd-cfe7-48a6-91c1-78fb168963cb","Type":"ContainerDied","Data":"aacc762242ca7e1489e8960dd7b52c81c9882cbb597357c8ea66ac9bd4201bd7"}
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.098555 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b110-account-create-v9jb8" event={"ID":"5c7901dd-cfe7-48a6-91c1-78fb168963cb","Type":"ContainerStarted","Data":"c0394efe13f307b295102fc87bb8eb2a584ad845d911e3385b75ddd5bfbe61f2"}
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.101534 4611 generic.go:334] "Generic (PLEG): container finished" podID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerID="3dfee8d7bb12af3564c72d9234b8e24eae27c1ab6e23a547e02cb38b0272c1c3" exitCode=0
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.101599 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a8501653-441a-4c5f-b098-bc5fb7aeba22","Type":"ContainerDied","Data":"3dfee8d7bb12af3564c72d9234b8e24eae27c1ab6e23a547e02cb38b0272c1c3"}
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.103292 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9b28-account-create-nt47n" event={"ID":"42115bcd-dc69-491c-a920-96969813366c","Type":"ContainerStarted","Data":"b0f86879abe511a397630c4b4ea7eb641cc9e881080987a8daff0ba82d3e390e"}
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.107878 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"bb99adb80929e7b860bf08d33aedeb7498b2103792159ec5ac86f0da95c424bd"}
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.590902 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.668883 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.668962 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgc86\" (UniqueName: \"kubernetes.io/projected/ac707613-fa2b-4406-8789-2c0a072d49f5-kube-api-access-cgc86\") pod \"ac707613-fa2b-4406-8789-2c0a072d49f5\" (UID: \"ac707613-fa2b-4406-8789-2c0a072d49f5\") "
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.692075 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac707613-fa2b-4406-8789-2c0a072d49f5-kube-api-access-cgc86" (OuterVolumeSpecName: "kube-api-access-cgc86") pod "ac707613-fa2b-4406-8789-2c0a072d49f5" (UID: "ac707613-fa2b-4406-8789-2c0a072d49f5"). InnerVolumeSpecName "kube-api-access-cgc86". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.771871 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwbnt\" (UniqueName: \"kubernetes.io/projected/717832b2-73e3-4b2c-8f82-26603268ee98-kube-api-access-mwbnt\") pod \"717832b2-73e3-4b2c-8f82-26603268ee98\" (UID: \"717832b2-73e3-4b2c-8f82-26603268ee98\") "
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.772751 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgc86\" (UniqueName: \"kubernetes.io/projected/ac707613-fa2b-4406-8789-2c0a072d49f5-kube-api-access-cgc86\") on node \"crc\" DevicePath \"\""
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.784854 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/717832b2-73e3-4b2c-8f82-26603268ee98-kube-api-access-mwbnt" (OuterVolumeSpecName: "kube-api-access-mwbnt") pod "717832b2-73e3-4b2c-8f82-26603268ee98" (UID: "717832b2-73e3-4b2c-8f82-26603268ee98"). InnerVolumeSpecName "kube-api-access-mwbnt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:58:13 crc kubenswrapper[4611]: I0929 12:58:13.874764 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwbnt\" (UniqueName: \"kubernetes.io/projected/717832b2-73e3-4b2c-8f82-26603268ee98-kube-api-access-mwbnt\") on node \"crc\" DevicePath \"\""
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.120659 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a8501653-441a-4c5f-b098-bc5fb7aeba22","Type":"ContainerStarted","Data":"cb1724d6ea5c29546933c2f22f5ec658deda7f84e8465c9001b046ec486d7a65"}
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.120868 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.125401 4611 generic.go:334] "Generic (PLEG): container finished" podID="42115bcd-dc69-491c-a920-96969813366c" containerID="5a348c3cb9723c5ff8a65384e712b3d2021f9dbebec4032ca1b82b7c354f7b23" exitCode=0
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.125430 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9b28-account-create-nt47n" event={"ID":"42115bcd-dc69-491c-a920-96969813366c","Type":"ContainerDied","Data":"5a348c3cb9723c5ff8a65384e712b3d2021f9dbebec4032ca1b82b7c354f7b23"}
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.137177 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"9a737019e26a9e8c888cadb7565f841f8b9a5c6df761a6f811cdd66e18f1f2d0"}
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.137221 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"7826b3db0e64b9f4680b60152c8d916ff8f7a3b5eb737f14c40e9873c2f1c1a5"}
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.143125 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-m4vwf"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.143926 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-m4vwf" event={"ID":"ac707613-fa2b-4406-8789-2c0a072d49f5","Type":"ContainerDied","Data":"2b3819f3f4583bd226857a705d32b4b06499299633f1bf0fe549d56c8d33076f"}
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.143974 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b3819f3f4583bd226857a705d32b4b06499299633f1bf0fe549d56c8d33076f"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.150103 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-6kfbn" event={"ID":"717832b2-73e3-4b2c-8f82-26603268ee98","Type":"ContainerDied","Data":"32fbbf26f645b0e20859b9a3c459b320b98c99f4855c1ed7d4ff67b0e1470afc"}
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.150162 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32fbbf26f645b0e20859b9a3c459b320b98c99f4855c1ed7d4ff67b0e1470afc"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.150122 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-6kfbn"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.154883 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371948.699911 podStartE2EDuration="1m28.154864394s" podCreationTimestamp="2025-09-29 12:56:46 +0000 UTC" firstStartedPulling="2025-09-29 12:56:49.094655057 +0000 UTC m=+995.986174663" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:14.150891499 +0000 UTC m=+1081.042411115" watchObservedRunningTime="2025-09-29 12:58:14.154864394 +0000 UTC m=+1081.046384000"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.515098 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-9jsdt" podUID="b42caf5f-0509-41a3-ab3c-49b5b2be817e" containerName="ovn-controller" probeResult="failure" output=<
Sep 29 12:58:14 crc kubenswrapper[4611]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 29 12:58:14 crc kubenswrapper[4611]: >
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.626023 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.646552 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.689653 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9gp2\" (UniqueName: \"kubernetes.io/projected/5c7901dd-cfe7-48a6-91c1-78fb168963cb-kube-api-access-s9gp2\") pod \"5c7901dd-cfe7-48a6-91c1-78fb168963cb\" (UID: \"5c7901dd-cfe7-48a6-91c1-78fb168963cb\") "
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.689707 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkkbl\" (UniqueName: \"kubernetes.io/projected/1ce7630f-ba0f-426d-8440-7525c4555235-kube-api-access-tkkbl\") pod \"1ce7630f-ba0f-426d-8440-7525c4555235\" (UID: \"1ce7630f-ba0f-426d-8440-7525c4555235\") "
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.724140 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c7901dd-cfe7-48a6-91c1-78fb168963cb-kube-api-access-s9gp2" (OuterVolumeSpecName: "kube-api-access-s9gp2") pod "5c7901dd-cfe7-48a6-91c1-78fb168963cb" (UID: "5c7901dd-cfe7-48a6-91c1-78fb168963cb"). InnerVolumeSpecName "kube-api-access-s9gp2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.740073 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce7630f-ba0f-426d-8440-7525c4555235-kube-api-access-tkkbl" (OuterVolumeSpecName: "kube-api-access-tkkbl") pod "1ce7630f-ba0f-426d-8440-7525c4555235" (UID: "1ce7630f-ba0f-426d-8440-7525c4555235"). InnerVolumeSpecName "kube-api-access-tkkbl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.792931 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9gp2\" (UniqueName: \"kubernetes.io/projected/5c7901dd-cfe7-48a6-91c1-78fb168963cb-kube-api-access-s9gp2\") on node \"crc\" DevicePath \"\""
Sep 29 12:58:14 crc kubenswrapper[4611]: I0929 12:58:14.793111 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkkbl\" (UniqueName: \"kubernetes.io/projected/1ce7630f-ba0f-426d-8440-7525c4555235-kube-api-access-tkkbl\") on node \"crc\" DevicePath \"\""
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.159546 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-hvpz4" event={"ID":"1ce7630f-ba0f-426d-8440-7525c4555235","Type":"ContainerDied","Data":"25c96480d3b5cac705f71c44f0dd4504d2581633e079f7e628e87fe0d735d88a"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.160668 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25c96480d3b5cac705f71c44f0dd4504d2581633e079f7e628e87fe0d735d88a"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.159610 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-hvpz4"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.161585 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b110-account-create-v9jb8" event={"ID":"5c7901dd-cfe7-48a6-91c1-78fb168963cb","Type":"ContainerDied","Data":"c0394efe13f307b295102fc87bb8eb2a584ad845d911e3385b75ddd5bfbe61f2"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.161824 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0394efe13f307b295102fc87bb8eb2a584ad845d911e3385b75ddd5bfbe61f2"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.161807 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b110-account-create-v9jb8"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.168727 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"1c39541d0e74829874a49690478cd7b106685c22425abbb562b4b2a3770f0acb"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.168772 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"71048acdf73c52e8bfe405080867ba0321e9ba926e49dd9c267c792e0f2f9a12"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.168787 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"1e76b24319a38b2615dcfbe82c21b363447a2b2bd4bb0157fbc19e2e3ac70d40"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.168798 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"c7dd74194bcb7956d2fcf3791721a934ed969cdaaa021f792d0ab99fdc6e47a7"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.168811 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"458f3780-8709-4a3c-ac9e-9a1b5ced2172","Type":"ContainerStarted","Data":"afa3be7fef762eecad30907d0cc7a2bba8b68bc5bb59a26beca8be8167e0ade9"}
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.216693 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.482499551 podStartE2EDuration="43.216667204s" podCreationTimestamp="2025-09-29 12:57:32 +0000 UTC" firstStartedPulling="2025-09-29 12:58:06.827294368 +0000 UTC m=+1073.718813984" lastFinishedPulling="2025-09-29 12:58:13.561462041 +0000 UTC m=+1080.452981637" observedRunningTime="2025-09-29 12:58:15.209396614 +0000 UTC m=+1082.100916240" watchObservedRunningTime="2025-09-29 12:58:15.216667204 +0000 UTC m=+1082.108186820"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.490097 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.514123 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5449cc85b9-rgp8v"]
Sep 29 12:58:15 crc kubenswrapper[4611]: E0929 12:58:15.514450 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42115bcd-dc69-491c-a920-96969813366c" containerName="mariadb-account-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.514467 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="42115bcd-dc69-491c-a920-96969813366c" containerName="mariadb-account-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: E0929 12:58:15.514495 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717832b2-73e3-4b2c-8f82-26603268ee98" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.514501 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="717832b2-73e3-4b2c-8f82-26603268ee98" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: E0929 12:58:15.514512 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac707613-fa2b-4406-8789-2c0a072d49f5" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.514518 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac707613-fa2b-4406-8789-2c0a072d49f5" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: E0929 12:58:15.514530 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce7630f-ba0f-426d-8440-7525c4555235" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.514536 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce7630f-ba0f-426d-8440-7525c4555235" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: E0929 12:58:15.514547 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7901dd-cfe7-48a6-91c1-78fb168963cb" containerName="mariadb-account-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.514553 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7901dd-cfe7-48a6-91c1-78fb168963cb" containerName="mariadb-account-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.516355 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="717832b2-73e3-4b2c-8f82-26603268ee98" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.516390 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac707613-fa2b-4406-8789-2c0a072d49f5" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.516399 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="42115bcd-dc69-491c-a920-96969813366c" containerName="mariadb-account-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.516411 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7901dd-cfe7-48a6-91c1-78fb168963cb" containerName="mariadb-account-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.516421 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ce7630f-ba0f-426d-8440-7525c4555235" containerName="mariadb-database-create"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.517507 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.520652 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.550259 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5449cc85b9-rgp8v"]
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.606794 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnmdz\" (UniqueName: \"kubernetes.io/projected/42115bcd-dc69-491c-a920-96969813366c-kube-api-access-gnmdz\") pod \"42115bcd-dc69-491c-a920-96969813366c\" (UID: \"42115bcd-dc69-491c-a920-96969813366c\") "
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.634915 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42115bcd-dc69-491c-a920-96969813366c-kube-api-access-gnmdz" (OuterVolumeSpecName: "kube-api-access-gnmdz") pod "42115bcd-dc69-491c-a920-96969813366c" (UID: "42115bcd-dc69-491c-a920-96969813366c"). InnerVolumeSpecName "kube-api-access-gnmdz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.708474 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-config\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.708555 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-sb\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.708644 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-nb\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.708673 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-svc\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.708716 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-swift-storage-0\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.708736 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckf5p\" (UniqueName: \"kubernetes.io/projected/f994bbc6-ef59-4f7c-9c0b-709d712b5925-kube-api-access-ckf5p\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.709106 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnmdz\" (UniqueName: \"kubernetes.io/projected/42115bcd-dc69-491c-a920-96969813366c-kube-api-access-gnmdz\") on node \"crc\" DevicePath \"\""
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.810704 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-config\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.810807 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-sb\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.810882 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-nb\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.810907 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-svc\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.811978 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-nb\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.811904 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-swift-storage-0\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.812056 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckf5p\" (UniqueName: \"kubernetes.io/projected/f994bbc6-ef59-4f7c-9c0b-709d712b5925-kube-api-access-ckf5p\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.812063 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-config\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.812094 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-sb\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.812022 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-svc\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.812744 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-swift-storage-0\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.828874 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckf5p\" (UniqueName: \"kubernetes.io/projected/f994bbc6-ef59-4f7c-9c0b-709d712b5925-kube-api-access-ckf5p\") pod \"dnsmasq-dns-5449cc85b9-rgp8v\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:15 crc kubenswrapper[4611]: I0929 12:58:15.884114 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:16 crc kubenswrapper[4611]: I0929 12:58:16.180902 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9b28-account-create-nt47n"
Sep 29 12:58:16 crc kubenswrapper[4611]: I0929 12:58:16.181244 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9b28-account-create-nt47n" event={"ID":"42115bcd-dc69-491c-a920-96969813366c","Type":"ContainerDied","Data":"b0f86879abe511a397630c4b4ea7eb641cc9e881080987a8daff0ba82d3e390e"}
Sep 29 12:58:16 crc kubenswrapper[4611]: I0929 12:58:16.181262 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0f86879abe511a397630c4b4ea7eb641cc9e881080987a8daff0ba82d3e390e"
Sep 29 12:58:16 crc kubenswrapper[4611]: I0929 12:58:16.344055 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5449cc85b9-rgp8v"]
Sep 29 12:58:16 crc kubenswrapper[4611]: W0929 12:58:16.359874 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf994bbc6_ef59_4f7c_9c0b_709d712b5925.slice/crio-191ba79a83f97ae2b9b355dd918620a13ec26db0958a01f623a1de4ecb2a864d WatchSource:0}: Error finding container 191ba79a83f97ae2b9b355dd918620a13ec26db0958a01f623a1de4ecb2a864d: Status 404 returned error can't find the container with id 191ba79a83f97ae2b9b355dd918620a13ec26db0958a01f623a1de4ecb2a864d
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.192401 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-ljr99"]
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.193872 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.196306 4611 generic.go:334] "Generic (PLEG): container finished" podID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerID="f30b7e41d633212e1aaf6d47c74ed2472a82c0f09854d45b4fba620b6bfed093" exitCode=0
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.196337 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" event={"ID":"f994bbc6-ef59-4f7c-9c0b-709d712b5925","Type":"ContainerDied","Data":"f30b7e41d633212e1aaf6d47c74ed2472a82c0f09854d45b4fba620b6bfed093"}
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.196356 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" event={"ID":"f994bbc6-ef59-4f7c-9c0b-709d712b5925","Type":"ContainerStarted","Data":"191ba79a83f97ae2b9b355dd918620a13ec26db0958a01f623a1de4ecb2a864d"}
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.197157 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.201840 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hd98l"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.216467 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-ljr99"]
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.338453 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-combined-ca-bundle\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.338510 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-config-data\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.338604 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-db-sync-config-data\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.338672 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmts9\" (UniqueName: \"kubernetes.io/projected/88e8c098-763d-4a3b-b5e1-3f29d2b37845-kube-api-access-pmts9\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.440684 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-db-sync-config-data\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.440761 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmts9\" (UniqueName: \"kubernetes.io/projected/88e8c098-763d-4a3b-b5e1-3f29d2b37845-kube-api-access-pmts9\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.440830 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-combined-ca-bundle\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.440866 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-config-data\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.444833 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-db-sync-config-data\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.445389 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-combined-ca-bundle\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.449100 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-config-data\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.457937 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmts9\" (UniqueName: \"kubernetes.io/projected/88e8c098-763d-4a3b-b5e1-3f29d2b37845-kube-api-access-pmts9\") pod \"glance-db-sync-ljr99\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:17 crc kubenswrapper[4611]: I0929 12:58:17.672287 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-ljr99"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.208676 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" event={"ID":"f994bbc6-ef59-4f7c-9c0b-709d712b5925","Type":"ContainerStarted","Data":"45beec9f037b8f8d5137a9276aea854ea3ce4bc7e5b0a0be47e058d6a1f01757"}
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.209154 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.220827 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-ljr99"]
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.243681 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" podStartSLOduration=3.243656757 podStartE2EDuration="3.243656757s" podCreationTimestamp="2025-09-29 12:58:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:18.23679286 +0000 UTC m=+1085.128312476" watchObservedRunningTime="2025-09-29 12:58:18.243656757 +0000 UTC m=+1085.135176373"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.582140 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-2a63-account-create-hxwzj"]
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.583568 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-2a63-account-create-hxwzj"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.587159 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.597301 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-2a63-account-create-hxwzj"]
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.762853 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hsbc\" (UniqueName: \"kubernetes.io/projected/a0991826-0ea3-46e1-91d7-995b1eeb3772-kube-api-access-2hsbc\") pod \"cinder-2a63-account-create-hxwzj\" (UID: \"a0991826-0ea3-46e1-91d7-995b1eeb3772\") " pod="openstack/cinder-2a63-account-create-hxwzj"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.865891 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hsbc\" (UniqueName: \"kubernetes.io/projected/a0991826-0ea3-46e1-91d7-995b1eeb3772-kube-api-access-2hsbc\") pod \"cinder-2a63-account-create-hxwzj\" (UID: \"a0991826-0ea3-46e1-91d7-995b1eeb3772\") " pod="openstack/cinder-2a63-account-create-hxwzj"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.893720 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hsbc\" (UniqueName: \"kubernetes.io/projected/a0991826-0ea3-46e1-91d7-995b1eeb3772-kube-api-access-2hsbc\") pod \"cinder-2a63-account-create-hxwzj\" (UID: \"a0991826-0ea3-46e1-91d7-995b1eeb3772\") " pod="openstack/cinder-2a63-account-create-hxwzj"
Sep 29 12:58:18 crc kubenswrapper[4611]: I0929 12:58:18.921307 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-2a63-account-create-hxwzj"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.218618 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ljr99" event={"ID":"88e8c098-763d-4a3b-b5e1-3f29d2b37845","Type":"ContainerStarted","Data":"e3824b67b76d0666d1f3c1a21d0d60a5f1c33124aedcfe2d194a4c116df6723c"}
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.430351 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-2a63-account-create-hxwzj"]
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.493881 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-9jsdt" podUID="b42caf5f-0509-41a3-ab3c-49b5b2be817e" containerName="ovn-controller" probeResult="failure" output=<
Sep 29 12:58:19 crc kubenswrapper[4611]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 29 12:58:19 crc kubenswrapper[4611]: >
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.548160 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qrlpz"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.549975 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qrlpz"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.779227 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9jsdt-config-h2kcg"]
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.780243 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.813089 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9jsdt-config-h2kcg"]
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.880367 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-log-ovn\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.880458 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25452\" (UniqueName: \"kubernetes.io/projected/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-kube-api-access-25452\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.880784 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-scripts\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.880848 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run-ovn\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.880991 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.982972 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-scripts\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.983033 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run-ovn\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.983106 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.983165 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-log-ovn\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.983217 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25452\" (UniqueName: \"kubernetes.io/projected/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-kube-api-access-25452\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.984972 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.984997 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run-ovn\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.984999 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-log-ovn\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:19 crc kubenswrapper[4611]: I0929 12:58:19.986472 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-scripts\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:20 crc kubenswrapper[4611]: I0929 12:58:20.006427 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25452\" (UniqueName: \"kubernetes.io/projected/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-kube-api-access-25452\") pod \"ovn-controller-9jsdt-config-h2kcg\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:20 crc kubenswrapper[4611]: I0929 12:58:20.096244 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-h2kcg"
Sep 29 12:58:20 crc kubenswrapper[4611]: I0929 12:58:20.235986 4611 generic.go:334] "Generic (PLEG): container finished" podID="a0991826-0ea3-46e1-91d7-995b1eeb3772" containerID="1498868025b262a8ce3c2df4700757af26e941c0c5f135461e5552464eef0eb7" exitCode=0
Sep 29 12:58:20 crc kubenswrapper[4611]: I0929 12:58:20.236731 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-2a63-account-create-hxwzj" event={"ID":"a0991826-0ea3-46e1-91d7-995b1eeb3772","Type":"ContainerDied","Data":"1498868025b262a8ce3c2df4700757af26e941c0c5f135461e5552464eef0eb7"}
Sep 29 12:58:20 crc kubenswrapper[4611]: I0929 12:58:20.236756 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-2a63-account-create-hxwzj" event={"ID":"a0991826-0ea3-46e1-91d7-995b1eeb3772","Type":"ContainerStarted","Data":"64cfdcaa6934bb60f2ef514b0272ad4bfdd2bac7ce68c6e885d499b6bd6139a5"}
Sep 29 12:58:20 crc kubenswrapper[4611]: I0929 12:58:20.824903 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9jsdt-config-h2kcg"]
Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.181002 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-337e-account-create-vwdf8"]
Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.182521 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.192366 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.208808 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-337e-account-create-vwdf8"] Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.249337 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt-config-h2kcg" event={"ID":"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4","Type":"ContainerStarted","Data":"37c546f89bec485841fd676ff8aebfc4ea1bff2eb84df400fd44497f90f7c177"} Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.249384 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt-config-h2kcg" event={"ID":"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4","Type":"ContainerStarted","Data":"7381dee97a16b792740ef079ec2a02e3e097f7e1355f5f6b62faf38e815b3944"} Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.273988 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-9jsdt-config-h2kcg" podStartSLOduration=2.273965308 podStartE2EDuration="2.273965308s" podCreationTimestamp="2025-09-29 12:58:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:21.267831461 +0000 UTC m=+1088.159351077" watchObservedRunningTime="2025-09-29 12:58:21.273965308 +0000 UTC m=+1088.165484914" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.303112 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n7kq\" (UniqueName: \"kubernetes.io/projected/53965ddb-78b2-40ba-aa0b-808caee352d3-kube-api-access-8n7kq\") pod \"keystone-337e-account-create-vwdf8\" (UID: \"53965ddb-78b2-40ba-aa0b-808caee352d3\") " pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.404295 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n7kq\" (UniqueName: \"kubernetes.io/projected/53965ddb-78b2-40ba-aa0b-808caee352d3-kube-api-access-8n7kq\") pod \"keystone-337e-account-create-vwdf8\" (UID: \"53965ddb-78b2-40ba-aa0b-808caee352d3\") " pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.445433 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n7kq\" (UniqueName: \"kubernetes.io/projected/53965ddb-78b2-40ba-aa0b-808caee352d3-kube-api-access-8n7kq\") pod \"keystone-337e-account-create-vwdf8\" (UID: \"53965ddb-78b2-40ba-aa0b-808caee352d3\") " pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.504102 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.668925 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-2a63-account-create-hxwzj" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.810276 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hsbc\" (UniqueName: \"kubernetes.io/projected/a0991826-0ea3-46e1-91d7-995b1eeb3772-kube-api-access-2hsbc\") pod \"a0991826-0ea3-46e1-91d7-995b1eeb3772\" (UID: \"a0991826-0ea3-46e1-91d7-995b1eeb3772\") " Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.831877 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0991826-0ea3-46e1-91d7-995b1eeb3772-kube-api-access-2hsbc" (OuterVolumeSpecName: "kube-api-access-2hsbc") pod "a0991826-0ea3-46e1-91d7-995b1eeb3772" (UID: "a0991826-0ea3-46e1-91d7-995b1eeb3772"). InnerVolumeSpecName "kube-api-access-2hsbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.911694 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hsbc\" (UniqueName: \"kubernetes.io/projected/a0991826-0ea3-46e1-91d7-995b1eeb3772-kube-api-access-2hsbc\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:21 crc kubenswrapper[4611]: I0929 12:58:21.992039 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-337e-account-create-vwdf8"] Sep 29 12:58:22 crc kubenswrapper[4611]: W0929 12:58:22.004253 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53965ddb_78b2_40ba_aa0b_808caee352d3.slice/crio-cd192920464d3b1452f29be91560e0e177e38221888e4128b115f234457513f4 WatchSource:0}: Error finding container cd192920464d3b1452f29be91560e0e177e38221888e4128b115f234457513f4: Status 404 returned error can't find the container with id cd192920464d3b1452f29be91560e0e177e38221888e4128b115f234457513f4 Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.261307 4611 generic.go:334] "Generic (PLEG): container finished" podID="7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" containerID="37c546f89bec485841fd676ff8aebfc4ea1bff2eb84df400fd44497f90f7c177" exitCode=0 Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.261418 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt-config-h2kcg" event={"ID":"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4","Type":"ContainerDied","Data":"37c546f89bec485841fd676ff8aebfc4ea1bff2eb84df400fd44497f90f7c177"} Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.265670 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-2a63-account-create-hxwzj" event={"ID":"a0991826-0ea3-46e1-91d7-995b1eeb3772","Type":"ContainerDied","Data":"64cfdcaa6934bb60f2ef514b0272ad4bfdd2bac7ce68c6e885d499b6bd6139a5"} Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.265703 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64cfdcaa6934bb60f2ef514b0272ad4bfdd2bac7ce68c6e885d499b6bd6139a5" Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.265704 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-2a63-account-create-hxwzj" Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.267425 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-337e-account-create-vwdf8" event={"ID":"53965ddb-78b2-40ba-aa0b-808caee352d3","Type":"ContainerStarted","Data":"f8b003c2b5a1f94fb0322f5508a8a342c8472ce5e071b045dc8d796c7bc33a1f"} Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.267451 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-337e-account-create-vwdf8" event={"ID":"53965ddb-78b2-40ba-aa0b-808caee352d3","Type":"ContainerStarted","Data":"cd192920464d3b1452f29be91560e0e177e38221888e4128b115f234457513f4"} Sep 29 12:58:22 crc kubenswrapper[4611]: I0929 12:58:22.307843 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-337e-account-create-vwdf8" podStartSLOduration=1.307815572 podStartE2EDuration="1.307815572s" podCreationTimestamp="2025-09-29 12:58:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:22.298791092 +0000 UTC m=+1089.190310708" watchObservedRunningTime="2025-09-29 12:58:22.307815572 +0000 UTC m=+1089.199335198" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.276952 4611 generic.go:334] "Generic (PLEG): container finished" podID="53965ddb-78b2-40ba-aa0b-808caee352d3" containerID="f8b003c2b5a1f94fb0322f5508a8a342c8472ce5e071b045dc8d796c7bc33a1f" exitCode=0 Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.277329 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-337e-account-create-vwdf8" event={"ID":"53965ddb-78b2-40ba-aa0b-808caee352d3","Type":"ContainerDied","Data":"f8b003c2b5a1f94fb0322f5508a8a342c8472ce5e071b045dc8d796c7bc33a1f"} Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.618076 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-h2kcg" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744171 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run\") pod \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744226 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run" (OuterVolumeSpecName: "var-run") pod "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" (UID: "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744260 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-log-ovn\") pod \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744329 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-scripts\") pod \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744354 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" (UID: "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744390 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25452\" (UniqueName: \"kubernetes.io/projected/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-kube-api-access-25452\") pod \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744431 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run-ovn\") pod \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\" (UID: \"7d7bf763-1115-4c02-bc91-1b95e7f3cbc4\") " Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744819 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" (UID: "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744931 4611 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744949 4611 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.744960 4611 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.745811 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-scripts" (OuterVolumeSpecName: "scripts") pod "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" (UID: "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.789978 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-kube-api-access-25452" (OuterVolumeSpecName: "kube-api-access-25452") pod "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" (UID: "7d7bf763-1115-4c02-bc91-1b95e7f3cbc4"). InnerVolumeSpecName "kube-api-access-25452". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.846956 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.847230 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25452\" (UniqueName: \"kubernetes.io/projected/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4-kube-api-access-25452\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.917212 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9jsdt-config-h2kcg"] Sep 29 12:58:23 crc kubenswrapper[4611]: I0929 12:58:23.931785 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9jsdt-config-h2kcg"] Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.003137 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9jsdt-config-tlhfb"] Sep 29 12:58:24 crc kubenswrapper[4611]: E0929 12:58:24.003554 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0991826-0ea3-46e1-91d7-995b1eeb3772" containerName="mariadb-account-create" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.003576 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0991826-0ea3-46e1-91d7-995b1eeb3772" containerName="mariadb-account-create" Sep 29 12:58:24 crc kubenswrapper[4611]: E0929 12:58:24.003588 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" containerName="ovn-config" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.003596 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" containerName="ovn-config" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.003810 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0991826-0ea3-46e1-91d7-995b1eeb3772" containerName="mariadb-account-create" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.003839 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" containerName="ovn-config" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.004442 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.026504 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9jsdt-config-tlhfb"] Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.153260 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-log-ovn\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.153568 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.153726 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbq2w\" (UniqueName: \"kubernetes.io/projected/dccd7ac1-5475-494b-82b3-96fe7d17c524-kube-api-access-jbq2w\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.153879 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dccd7ac1-5475-494b-82b3-96fe7d17c524-scripts\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.153990 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run-ovn\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255031 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run-ovn\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255105 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-log-ovn\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255152 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255204 4611 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbq2w\" (UniqueName: \"kubernetes.io/projected/dccd7ac1-5475-494b-82b3-96fe7d17c524-kube-api-access-jbq2w\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255279 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dccd7ac1-5475-494b-82b3-96fe7d17c524-scripts\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255501 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run-ovn\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255547 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-log-ovn\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.255882 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.257441 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dccd7ac1-5475-494b-82b3-96fe7d17c524-scripts\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.278124 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbq2w\" (UniqueName: \"kubernetes.io/projected/dccd7ac1-5475-494b-82b3-96fe7d17c524-kube-api-access-jbq2w\") pod \"ovn-controller-9jsdt-config-tlhfb\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.293754 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7381dee97a16b792740ef079ec2a02e3e097f7e1355f5f6b62faf38e815b3944" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.293827 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-h2kcg" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.321828 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.511044 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-9jsdt" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.748029 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.866164 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n7kq\" (UniqueName: \"kubernetes.io/projected/53965ddb-78b2-40ba-aa0b-808caee352d3-kube-api-access-8n7kq\") pod \"53965ddb-78b2-40ba-aa0b-808caee352d3\" (UID: \"53965ddb-78b2-40ba-aa0b-808caee352d3\") " Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.897345 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53965ddb-78b2-40ba-aa0b-808caee352d3-kube-api-access-8n7kq" (OuterVolumeSpecName: "kube-api-access-8n7kq") pod "53965ddb-78b2-40ba-aa0b-808caee352d3" (UID: "53965ddb-78b2-40ba-aa0b-808caee352d3"). InnerVolumeSpecName "kube-api-access-8n7kq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:24 crc kubenswrapper[4611]: I0929 12:58:24.969129 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n7kq\" (UniqueName: \"kubernetes.io/projected/53965ddb-78b2-40ba-aa0b-808caee352d3-kube-api-access-8n7kq\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.016427 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9jsdt-config-tlhfb"] Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.308247 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-337e-account-create-vwdf8" Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.308756 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-337e-account-create-vwdf8" event={"ID":"53965ddb-78b2-40ba-aa0b-808caee352d3","Type":"ContainerDied","Data":"cd192920464d3b1452f29be91560e0e177e38221888e4128b115f234457513f4"} Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.308844 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd192920464d3b1452f29be91560e0e177e38221888e4128b115f234457513f4" Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.310611 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt-config-tlhfb" event={"ID":"dccd7ac1-5475-494b-82b3-96fe7d17c524","Type":"ContainerStarted","Data":"9abf394329407889bdfcf6c644952e4bfe92361e00807546c00b07b4676a3553"} Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.754115 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d7bf763-1115-4c02-bc91-1b95e7f3cbc4" path="/var/lib/kubelet/pods/7d7bf763-1115-4c02-bc91-1b95e7f3cbc4/volumes" Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.885400 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.966376 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69fd5f8c87-pkvjk"] Sep 29 12:58:25 crc kubenswrapper[4611]: I0929 12:58:25.966593 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" containerName="dnsmasq-dns" containerID="cri-o://2fcd8e048f1f7869c8186ec4cdd8be6001eb3d8ebb88d7e8629e89ae8bbe8a27" gracePeriod=10 Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.323992 4611 generic.go:334] "Generic (PLEG): container finished" podID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" 
containerID="2fcd8e048f1f7869c8186ec4cdd8be6001eb3d8ebb88d7e8629e89ae8bbe8a27" exitCode=0 Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.324099 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" event={"ID":"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10","Type":"ContainerDied","Data":"2fcd8e048f1f7869c8186ec4cdd8be6001eb3d8ebb88d7e8629e89ae8bbe8a27"} Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.331970 4611 generic.go:334] "Generic (PLEG): container finished" podID="dccd7ac1-5475-494b-82b3-96fe7d17c524" containerID="4aff2a9484c42afaad021cd6f49cd771106e959a435c58a56b2aa05d93ca305c" exitCode=0 Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.332024 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt-config-tlhfb" event={"ID":"dccd7ac1-5475-494b-82b3-96fe7d17c524","Type":"ContainerDied","Data":"4aff2a9484c42afaad021cd6f49cd771106e959a435c58a56b2aa05d93ca305c"} Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.548421 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.708202 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-dns-svc\") pod \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.708262 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-nb\") pod \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.708286 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-config\") pod \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.708346 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcrnm\" (UniqueName: \"kubernetes.io/projected/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-kube-api-access-kcrnm\") pod \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.708387 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-sb\") pod \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\" (UID: \"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10\") " Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.713892 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-kube-api-access-kcrnm" (OuterVolumeSpecName: "kube-api-access-kcrnm") pod "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" (UID: "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10"). InnerVolumeSpecName "kube-api-access-kcrnm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.761584 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-config" (OuterVolumeSpecName: "config") pod "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" (UID: "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.766341 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" (UID: "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.771792 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" (UID: "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.805366 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" (UID: "bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.809668 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.809694 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.809704 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.809713 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcrnm\" (UniqueName: \"kubernetes.io/projected/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-kube-api-access-kcrnm\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:26 crc kubenswrapper[4611]: I0929 12:58:26.809722 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.345052 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.345443 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fd5f8c87-pkvjk" event={"ID":"bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10","Type":"ContainerDied","Data":"8c4dcb102197ceb25fb3caf666fb95236e3020bbf5f817252c1b5815aa4a612b"} Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.346465 4611 scope.go:117] "RemoveContainer" containerID="2fcd8e048f1f7869c8186ec4cdd8be6001eb3d8ebb88d7e8629e89ae8bbe8a27" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.375207 4611 scope.go:117] "RemoveContainer" containerID="685464c25d9581d4db8c7f3e8be1632d730716638233b5e61033e00eb32d2661" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.396238 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69fd5f8c87-pkvjk"] Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.405392 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69fd5f8c87-pkvjk"] Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.612182 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.735453 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-log-ovn\") pod \"dccd7ac1-5475-494b-82b3-96fe7d17c524\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.735987 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "dccd7ac1-5475-494b-82b3-96fe7d17c524" (UID: "dccd7ac1-5475-494b-82b3-96fe7d17c524"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.738794 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dccd7ac1-5475-494b-82b3-96fe7d17c524-scripts\") pod \"dccd7ac1-5475-494b-82b3-96fe7d17c524\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.738843 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbq2w\" (UniqueName: \"kubernetes.io/projected/dccd7ac1-5475-494b-82b3-96fe7d17c524-kube-api-access-jbq2w\") pod \"dccd7ac1-5475-494b-82b3-96fe7d17c524\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.738907 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run-ovn\") pod \"dccd7ac1-5475-494b-82b3-96fe7d17c524\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.739016 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run\") pod \"dccd7ac1-5475-494b-82b3-96fe7d17c524\" (UID: \"dccd7ac1-5475-494b-82b3-96fe7d17c524\") " Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.739545 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run" (OuterVolumeSpecName: "var-run") pod "dccd7ac1-5475-494b-82b3-96fe7d17c524" (UID: "dccd7ac1-5475-494b-82b3-96fe7d17c524"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.740735 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dccd7ac1-5475-494b-82b3-96fe7d17c524-scripts" (OuterVolumeSpecName: "scripts") pod "dccd7ac1-5475-494b-82b3-96fe7d17c524" (UID: "dccd7ac1-5475-494b-82b3-96fe7d17c524"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.740768 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "dccd7ac1-5475-494b-82b3-96fe7d17c524" (UID: "dccd7ac1-5475-494b-82b3-96fe7d17c524"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.740868 4611 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.744021 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dccd7ac1-5475-494b-82b3-96fe7d17c524-kube-api-access-jbq2w" (OuterVolumeSpecName: "kube-api-access-jbq2w") pod "dccd7ac1-5475-494b-82b3-96fe7d17c524" (UID: "dccd7ac1-5475-494b-82b3-96fe7d17c524"). InnerVolumeSpecName "kube-api-access-jbq2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.758350 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" path="/var/lib/kubelet/pods/bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10/volumes" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.842848 4611 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.842901 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dccd7ac1-5475-494b-82b3-96fe7d17c524-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.842913 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbq2w\" (UniqueName: \"kubernetes.io/projected/dccd7ac1-5475-494b-82b3-96fe7d17c524-kube-api-access-jbq2w\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:27 crc kubenswrapper[4611]: I0929 12:58:27.842923 4611 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/dccd7ac1-5475-494b-82b3-96fe7d17c524-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.358181 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9jsdt-config-tlhfb" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.358196 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9jsdt-config-tlhfb" event={"ID":"dccd7ac1-5475-494b-82b3-96fe7d17c524","Type":"ContainerDied","Data":"9abf394329407889bdfcf6c644952e4bfe92361e00807546c00b07b4676a3553"} Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.358227 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9abf394329407889bdfcf6c644952e4bfe92361e00807546c00b07b4676a3553" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.388811 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620121 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0fb9-account-create-fqsph"] Sep 29 12:58:28 crc kubenswrapper[4611]: E0929 12:58:28.620552 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dccd7ac1-5475-494b-82b3-96fe7d17c524" containerName="ovn-config" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620569 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="dccd7ac1-5475-494b-82b3-96fe7d17c524" containerName="ovn-config" Sep 29 12:58:28 crc kubenswrapper[4611]: E0929 12:58:28.620584 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" containerName="init" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620592 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" containerName="init" Sep 29 12:58:28 crc kubenswrapper[4611]: E0929 12:58:28.620607 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" containerName="dnsmasq-dns" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620614 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" 
containerName="dnsmasq-dns" Sep 29 12:58:28 crc kubenswrapper[4611]: E0929 12:58:28.620672 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53965ddb-78b2-40ba-aa0b-808caee352d3" containerName="mariadb-account-create" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620680 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="53965ddb-78b2-40ba-aa0b-808caee352d3" containerName="mariadb-account-create" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620872 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="dccd7ac1-5475-494b-82b3-96fe7d17c524" containerName="ovn-config" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620888 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="53965ddb-78b2-40ba-aa0b-808caee352d3" containerName="mariadb-account-create" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.620910 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdb9ce1d-efbb-4e7f-b260-ec5654cfbb10" containerName="dnsmasq-dns" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.621567 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.623949 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.649896 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0fb9-account-create-fqsph"] Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.755685 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9jsdt-config-tlhfb"] Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.759190 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh97z\" (UniqueName: \"kubernetes.io/projected/0fd4b56b-fd87-409a-a2b8-475d86973e7e-kube-api-access-vh97z\") pod \"barbican-0fb9-account-create-fqsph\" (UID: \"0fd4b56b-fd87-409a-a2b8-475d86973e7e\") " pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.791418 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9jsdt-config-tlhfb"] Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.860414 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh97z\" (UniqueName: \"kubernetes.io/projected/0fd4b56b-fd87-409a-a2b8-475d86973e7e-kube-api-access-vh97z\") pod \"barbican-0fb9-account-create-fqsph\" (UID: \"0fd4b56b-fd87-409a-a2b8-475d86973e7e\") " pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.892815 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh97z\" (UniqueName: \"kubernetes.io/projected/0fd4b56b-fd87-409a-a2b8-475d86973e7e-kube-api-access-vh97z\") pod \"barbican-0fb9-account-create-fqsph\" (UID: \"0fd4b56b-fd87-409a-a2b8-475d86973e7e\") " pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.904139 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-08d9-account-create-4cgzx"] Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.905575 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.909027 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.955512 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-08d9-account-create-4cgzx"] Sep 29 12:58:28 crc kubenswrapper[4611]: I0929 12:58:28.985839 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.064354 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qflj\" (UniqueName: \"kubernetes.io/projected/25628b46-3189-4901-8ce7-c17a4330f8b8-kube-api-access-2qflj\") pod \"neutron-08d9-account-create-4cgzx\" (UID: \"25628b46-3189-4901-8ce7-c17a4330f8b8\") " pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.166647 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qflj\" (UniqueName: \"kubernetes.io/projected/25628b46-3189-4901-8ce7-c17a4330f8b8-kube-api-access-2qflj\") pod \"neutron-08d9-account-create-4cgzx\" (UID: \"25628b46-3189-4901-8ce7-c17a4330f8b8\") " pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.205594 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qflj\" (UniqueName: \"kubernetes.io/projected/25628b46-3189-4901-8ce7-c17a4330f8b8-kube-api-access-2qflj\") pod \"neutron-08d9-account-create-4cgzx\" (UID: \"25628b46-3189-4901-8ce7-c17a4330f8b8\") " pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.270076 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.612540 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0fb9-account-create-fqsph"] Sep 29 12:58:29 crc kubenswrapper[4611]: W0929 12:58:29.631496 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fd4b56b_fd87_409a_a2b8_475d86973e7e.slice/crio-9752a048ffb405a97346083718ce9b85c2614b5b5f88c0984298e31219b25e2e WatchSource:0}: Error finding container 9752a048ffb405a97346083718ce9b85c2614b5b5f88c0984298e31219b25e2e: Status 404 returned error can't find the container with id 9752a048ffb405a97346083718ce9b85c2614b5b5f88c0984298e31219b25e2e Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.746670 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dccd7ac1-5475-494b-82b3-96fe7d17c524" path="/var/lib/kubelet/pods/dccd7ac1-5475-494b-82b3-96fe7d17c524/volumes" Sep 29 12:58:29 crc kubenswrapper[4611]: I0929 12:58:29.794212 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-08d9-account-create-4cgzx"] Sep 29 12:58:30 crc kubenswrapper[4611]: I0929 12:58:30.386993 4611 generic.go:334] "Generic (PLEG): container finished" podID="0fd4b56b-fd87-409a-a2b8-475d86973e7e" containerID="53bcd7e7891373f9b84f84c1d1b7dd05e2ad1547224831e889dfdf07fd8f12a5" exitCode=0 Sep 29 12:58:30 crc kubenswrapper[4611]: I0929 12:58:30.387257 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0fb9-account-create-fqsph" event={"ID":"0fd4b56b-fd87-409a-a2b8-475d86973e7e","Type":"ContainerDied","Data":"53bcd7e7891373f9b84f84c1d1b7dd05e2ad1547224831e889dfdf07fd8f12a5"} Sep 29 12:58:30 crc kubenswrapper[4611]: I0929 12:58:30.387283 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0fb9-account-create-fqsph" event={"ID":"0fd4b56b-fd87-409a-a2b8-475d86973e7e","Type":"ContainerStarted","Data":"9752a048ffb405a97346083718ce9b85c2614b5b5f88c0984298e31219b25e2e"} Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.862853 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-x6848"] Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.865532 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.868083 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.868421 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cs8gt" Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.868501 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.868595 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 12:58:36 crc kubenswrapper[4611]: I0929 12:58:36.885002 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-x6848"] Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.006604 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-config-data\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.006732 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ms9l\" (UniqueName: \"kubernetes.io/projected/4fc77f95-5def-4756-80e5-d2b044505f85-kube-api-access-8ms9l\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.006773 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-combined-ca-bundle\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.108838 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-config-data\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.109289 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ms9l\" (UniqueName: \"kubernetes.io/projected/4fc77f95-5def-4756-80e5-d2b044505f85-kube-api-access-8ms9l\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.109326 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-combined-ca-bundle\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.125508 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-combined-ca-bundle\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " 
pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.126035 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-config-data\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.128483 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ms9l\" (UniqueName: \"kubernetes.io/projected/4fc77f95-5def-4756-80e5-d2b044505f85-kube-api-access-8ms9l\") pod \"keystone-db-sync-x6848\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:37 crc kubenswrapper[4611]: I0929 12:58:37.184030 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:38 crc kubenswrapper[4611]: I0929 12:58:38.888252 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.044566 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vh97z\" (UniqueName: \"kubernetes.io/projected/0fd4b56b-fd87-409a-a2b8-475d86973e7e-kube-api-access-vh97z\") pod \"0fd4b56b-fd87-409a-a2b8-475d86973e7e\" (UID: \"0fd4b56b-fd87-409a-a2b8-475d86973e7e\") " Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.051198 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fd4b56b-fd87-409a-a2b8-475d86973e7e-kube-api-access-vh97z" (OuterVolumeSpecName: "kube-api-access-vh97z") pod "0fd4b56b-fd87-409a-a2b8-475d86973e7e" (UID: "0fd4b56b-fd87-409a-a2b8-475d86973e7e"). InnerVolumeSpecName "kube-api-access-vh97z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.137024 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-x6848"] Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.146204 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vh97z\" (UniqueName: \"kubernetes.io/projected/0fd4b56b-fd87-409a-a2b8-475d86973e7e-kube-api-access-vh97z\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.472486 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x6848" event={"ID":"4fc77f95-5def-4756-80e5-d2b044505f85","Type":"ContainerStarted","Data":"0544fe71dfcb1f5873a81cf6afeed50e2c41d19bbc965779331751004c1241cf"} Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.474643 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0fb9-account-create-fqsph" event={"ID":"0fd4b56b-fd87-409a-a2b8-475d86973e7e","Type":"ContainerDied","Data":"9752a048ffb405a97346083718ce9b85c2614b5b5f88c0984298e31219b25e2e"} Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.474673 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9752a048ffb405a97346083718ce9b85c2614b5b5f88c0984298e31219b25e2e" Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.474767 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0fb9-account-create-fqsph" Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.484981 4611 generic.go:334] "Generic (PLEG): container finished" podID="25628b46-3189-4901-8ce7-c17a4330f8b8" containerID="e016544a6090ff4b99bc136bbd8f17ca099a4f5e5695fc1589b094597277cad7" exitCode=0 Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.485062 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-08d9-account-create-4cgzx" event={"ID":"25628b46-3189-4901-8ce7-c17a4330f8b8","Type":"ContainerDied","Data":"e016544a6090ff4b99bc136bbd8f17ca099a4f5e5695fc1589b094597277cad7"} Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.485100 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-08d9-account-create-4cgzx" event={"ID":"25628b46-3189-4901-8ce7-c17a4330f8b8","Type":"ContainerStarted","Data":"8aac9af250dcc7ead47ead820c422329854088fdd6a1998cc924388b6c7241ca"} Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.496663 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ljr99" event={"ID":"88e8c098-763d-4a3b-b5e1-3f29d2b37845","Type":"ContainerStarted","Data":"ff65b0f113cc0e8eaeba161d4cc84072e42d536252574e19c149c22039fc02b9"} Sep 29 12:58:39 crc kubenswrapper[4611]: I0929 12:58:39.528704 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-ljr99" podStartSLOduration=2.040882505 podStartE2EDuration="22.528676891s" podCreationTimestamp="2025-09-29 12:58:17 +0000 UTC" firstStartedPulling="2025-09-29 12:58:18.240649801 +0000 UTC m=+1085.132169407" lastFinishedPulling="2025-09-29 12:58:38.728444177 +0000 UTC m=+1105.619963793" observedRunningTime="2025-09-29 12:58:39.52414424 +0000 UTC m=+1106.415663856" watchObservedRunningTime="2025-09-29 12:58:39.528676891 +0000 UTC m=+1106.420196497" Sep 29 12:58:40 crc kubenswrapper[4611]: I0929 12:58:40.825261 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:40 crc kubenswrapper[4611]: I0929 12:58:40.986231 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qflj\" (UniqueName: \"kubernetes.io/projected/25628b46-3189-4901-8ce7-c17a4330f8b8-kube-api-access-2qflj\") pod \"25628b46-3189-4901-8ce7-c17a4330f8b8\" (UID: \"25628b46-3189-4901-8ce7-c17a4330f8b8\") " Sep 29 12:58:40 crc kubenswrapper[4611]: I0929 12:58:40.995819 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25628b46-3189-4901-8ce7-c17a4330f8b8-kube-api-access-2qflj" (OuterVolumeSpecName: "kube-api-access-2qflj") pod "25628b46-3189-4901-8ce7-c17a4330f8b8" (UID: "25628b46-3189-4901-8ce7-c17a4330f8b8"). InnerVolumeSpecName "kube-api-access-2qflj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:41 crc kubenswrapper[4611]: I0929 12:58:41.088012 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qflj\" (UniqueName: \"kubernetes.io/projected/25628b46-3189-4901-8ce7-c17a4330f8b8-kube-api-access-2qflj\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:41 crc kubenswrapper[4611]: I0929 12:58:41.519689 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-08d9-account-create-4cgzx" event={"ID":"25628b46-3189-4901-8ce7-c17a4330f8b8","Type":"ContainerDied","Data":"8aac9af250dcc7ead47ead820c422329854088fdd6a1998cc924388b6c7241ca"} Sep 29 12:58:41 crc kubenswrapper[4611]: I0929 12:58:41.519929 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8aac9af250dcc7ead47ead820c422329854088fdd6a1998cc924388b6c7241ca" Sep 29 12:58:41 crc kubenswrapper[4611]: I0929 12:58:41.519732 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-08d9-account-create-4cgzx" Sep 29 12:58:45 crc kubenswrapper[4611]: I0929 12:58:45.555117 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x6848" event={"ID":"4fc77f95-5def-4756-80e5-d2b044505f85","Type":"ContainerStarted","Data":"78c435430c5473dcf241010ab50ff5ffd14ae148fdf05d8b95a69614b7c6520d"} Sep 29 12:58:45 crc kubenswrapper[4611]: I0929 12:58:45.578676 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-x6848" podStartSLOduration=4.174073153 podStartE2EDuration="9.578658447s" podCreationTimestamp="2025-09-29 12:58:36 +0000 UTC" firstStartedPulling="2025-09-29 12:58:39.151381282 +0000 UTC m=+1106.042900888" lastFinishedPulling="2025-09-29 12:58:44.555966576 +0000 UTC m=+1111.447486182" observedRunningTime="2025-09-29 12:58:45.574837017 +0000 UTC m=+1112.466356623" watchObservedRunningTime="2025-09-29 12:58:45.578658447 +0000 UTC m=+1112.470178053" Sep 29 12:58:51 crc kubenswrapper[4611]: I0929 12:58:51.611195 4611 generic.go:334] "Generic (PLEG): container finished" podID="4fc77f95-5def-4756-80e5-d2b044505f85" containerID="78c435430c5473dcf241010ab50ff5ffd14ae148fdf05d8b95a69614b7c6520d" exitCode=0 Sep 29 12:58:51 crc kubenswrapper[4611]: I0929 12:58:51.611278 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x6848" event={"ID":"4fc77f95-5def-4756-80e5-d2b044505f85","Type":"ContainerDied","Data":"78c435430c5473dcf241010ab50ff5ffd14ae148fdf05d8b95a69614b7c6520d"} Sep 29 12:58:52 crc kubenswrapper[4611]: I0929 12:58:52.918291 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.082572 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-combined-ca-bundle\") pod \"4fc77f95-5def-4756-80e5-d2b044505f85\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.082623 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-config-data\") pod \"4fc77f95-5def-4756-80e5-d2b044505f85\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.082699 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ms9l\" (UniqueName: \"kubernetes.io/projected/4fc77f95-5def-4756-80e5-d2b044505f85-kube-api-access-8ms9l\") pod \"4fc77f95-5def-4756-80e5-d2b044505f85\" (UID: \"4fc77f95-5def-4756-80e5-d2b044505f85\") " Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.089888 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fc77f95-5def-4756-80e5-d2b044505f85-kube-api-access-8ms9l" (OuterVolumeSpecName: "kube-api-access-8ms9l") pod "4fc77f95-5def-4756-80e5-d2b044505f85" (UID: "4fc77f95-5def-4756-80e5-d2b044505f85"). InnerVolumeSpecName "kube-api-access-8ms9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.115870 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fc77f95-5def-4756-80e5-d2b044505f85" (UID: "4fc77f95-5def-4756-80e5-d2b044505f85"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.133577 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-config-data" (OuterVolumeSpecName: "config-data") pod "4fc77f95-5def-4756-80e5-d2b044505f85" (UID: "4fc77f95-5def-4756-80e5-d2b044505f85"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.185019 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.185054 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc77f95-5def-4756-80e5-d2b044505f85-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.185066 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ms9l\" (UniqueName: \"kubernetes.io/projected/4fc77f95-5def-4756-80e5-d2b044505f85-kube-api-access-8ms9l\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.629021 4611 generic.go:334] "Generic (PLEG): container finished" podID="88e8c098-763d-4a3b-b5e1-3f29d2b37845" containerID="ff65b0f113cc0e8eaeba161d4cc84072e42d536252574e19c149c22039fc02b9" exitCode=0 Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.629157 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ljr99" event={"ID":"88e8c098-763d-4a3b-b5e1-3f29d2b37845","Type":"ContainerDied","Data":"ff65b0f113cc0e8eaeba161d4cc84072e42d536252574e19c149c22039fc02b9"} Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.631763 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x6848" event={"ID":"4fc77f95-5def-4756-80e5-d2b044505f85","Type":"ContainerDied","Data":"0544fe71dfcb1f5873a81cf6afeed50e2c41d19bbc965779331751004c1241cf"} Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.631926 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0544fe71dfcb1f5873a81cf6afeed50e2c41d19bbc965779331751004c1241cf" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.631794 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-x6848" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915183 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-756756db9c-6rqbh"] Sep 29 12:58:53 crc kubenswrapper[4611]: E0929 12:58:53.915571 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25628b46-3189-4901-8ce7-c17a4330f8b8" containerName="mariadb-account-create" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915588 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="25628b46-3189-4901-8ce7-c17a4330f8b8" containerName="mariadb-account-create" Sep 29 12:58:53 crc kubenswrapper[4611]: E0929 12:58:53.915596 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fc77f95-5def-4756-80e5-d2b044505f85" containerName="keystone-db-sync" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915602 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fc77f95-5def-4756-80e5-d2b044505f85" containerName="keystone-db-sync" Sep 29 12:58:53 crc kubenswrapper[4611]: E0929 12:58:53.915640 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fd4b56b-fd87-409a-a2b8-475d86973e7e" containerName="mariadb-account-create" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915647 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fd4b56b-fd87-409a-a2b8-475d86973e7e" containerName="mariadb-account-create" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915789 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="25628b46-3189-4901-8ce7-c17a4330f8b8" containerName="mariadb-account-create" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915806 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fc77f95-5def-4756-80e5-d2b044505f85" containerName="keystone-db-sync" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.915819 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fd4b56b-fd87-409a-a2b8-475d86973e7e" containerName="mariadb-account-create" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.916687 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.944162 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-qkvkb"] Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.945222 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.952148 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.952380 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.952698 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cs8gt" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.952843 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 12:58:53 crc kubenswrapper[4611]: I0929 12:58:53.964109 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-756756db9c-6rqbh"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.995098 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-qkvkb"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.997872 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-sb\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.997916 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-swift-storage-0\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.997963 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-nb\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.997988 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qwgp\" (UniqueName: \"kubernetes.io/projected/67ca8632-d9b3-487c-971d-f16ad6646754-kube-api-access-8qwgp\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998067 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-config-data\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998088 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cv6z\" (UniqueName: \"kubernetes.io/projected/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-kube-api-access-4cv6z\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 
12:58:53.998117 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-combined-ca-bundle\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998143 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-svc\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998169 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-credential-keys\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998198 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-scripts\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998228 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-config\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:53.998260 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-fernet-keys\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.099808 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-nb\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100127 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qwgp\" (UniqueName: \"kubernetes.io/projected/67ca8632-d9b3-487c-971d-f16ad6646754-kube-api-access-8qwgp\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100194 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-config-data\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100217 4611 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cv6z\" (UniqueName: \"kubernetes.io/projected/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-kube-api-access-4cv6z\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100239 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-combined-ca-bundle\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100257 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-svc\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100274 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-credential-keys\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100299 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-scripts\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100321 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-config\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100347 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-fernet-keys\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100375 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-sb\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100397 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-swift-storage-0\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.100793 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-nb\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.101072 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-swift-storage-0\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.101482 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-sb\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.101744 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-svc\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.101864 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-config\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.105879 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-config-data\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.106603 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-fernet-keys\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.107992 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-credential-keys\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.108347 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-combined-ca-bundle\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.121936 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-scripts\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 
29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.157429 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cv6z\" (UniqueName: \"kubernetes.io/projected/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-kube-api-access-4cv6z\") pod \"dnsmasq-dns-756756db9c-6rqbh\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.182426 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qwgp\" (UniqueName: \"kubernetes.io/projected/67ca8632-d9b3-487c-971d-f16ad6646754-kube-api-access-8qwgp\") pod \"keystone-bootstrap-qkvkb\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.232762 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.275993 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.311945 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-flx2p"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.313250 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.343759 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-l2rkl" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.344851 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.351576 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.406957 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-flx2p"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.469723 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-nmkf9"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.472330 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.480847 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.481078 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-64pb2" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.506123 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553670 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-combined-ca-bundle\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553731 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-config-data\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553766 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chd4h\" (UniqueName: \"kubernetes.io/projected/81475e0c-543c-43f2-8a53-d9a1e7adcbad-kube-api-access-chd4h\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553800 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-etc-machine-id\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553828 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-config\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553859 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-scripts\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553885 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-combined-ca-bundle\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.553969 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wmdr\" (UniqueName: 
\"kubernetes.io/projected/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-kube-api-access-7wmdr\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.554009 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-db-sync-config-data\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.556443 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6c64d5f989-8zd7h"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.568853 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.600216 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.600343 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-vhfbb" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.600956 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.619682 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-nmkf9"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.623858 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.658923 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg869\" (UniqueName: \"kubernetes.io/projected/2cb973fe-c427-4569-896f-b348e869c251-kube-api-access-zg869\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.658982 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cb973fe-c427-4569-896f-b348e869c251-horizon-secret-key\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659024 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wmdr\" (UniqueName: \"kubernetes.io/projected/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-kube-api-access-7wmdr\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659052 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-db-sync-config-data\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659085 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-combined-ca-bundle\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659101 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-config-data\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659126 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chd4h\" (UniqueName: \"kubernetes.io/projected/81475e0c-543c-43f2-8a53-d9a1e7adcbad-kube-api-access-chd4h\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659161 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cb973fe-c427-4569-896f-b348e869c251-logs\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659182 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-etc-machine-id\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659198 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-scripts\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659214 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-config\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659230 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-config-data\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659243 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-scripts\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.659262 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-combined-ca-bundle\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " 
pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.680250 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-etc-machine-id\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.695415 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-combined-ca-bundle\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.707778 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-combined-ca-bundle\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.716671 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-scripts\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.717221 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-config\") pod \"neutron-db-sync-flx2p\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.723890 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wmdr\" (UniqueName: \"kubernetes.io/projected/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-kube-api-access-7wmdr\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.723971 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c64d5f989-8zd7h"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.735138 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-db-sync-config-data\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.742294 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-config-data\") pod \"cinder-db-sync-nmkf9\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") " pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.747959 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.758097 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chd4h\" (UniqueName: \"kubernetes.io/projected/81475e0c-543c-43f2-8a53-d9a1e7adcbad-kube-api-access-chd4h\") pod \"neutron-db-sync-flx2p\" (UID: 
\"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.766259 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cb973fe-c427-4569-896f-b348e869c251-logs\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.766318 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-scripts\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.766353 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-config-data\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.766387 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg869\" (UniqueName: \"kubernetes.io/projected/2cb973fe-c427-4569-896f-b348e869c251-kube-api-access-zg869\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.766450 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cb973fe-c427-4569-896f-b348e869c251-horizon-secret-key\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.773383 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.776233 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.783463 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.783606 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-756756db9c-6rqbh"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.784354 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-scripts\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.788889 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cb973fe-c427-4569-896f-b348e869c251-logs\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.791469 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-config-data\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.795780 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cb973fe-c427-4569-896f-b348e869c251-horizon-secret-key\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.811772 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-flx2p" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.814476 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg869\" (UniqueName: \"kubernetes.io/projected/2cb973fe-c427-4569-896f-b348e869c251-kube-api-access-zg869\") pod \"horizon-6c64d5f989-8zd7h\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.826675 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-8jjnd"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.827978 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.832990 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.832990 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-zns9h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.833165 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.854601 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.869842 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nmkf9" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.888226 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7464cbd485-kspwh"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.889587 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.910110 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-8jjnd"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.918311 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.926304 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-9sjsz"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.927541 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.934237 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.934726 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-9n8gl" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.970758 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkgjs\" (UniqueName: \"kubernetes.io/projected/f5e45783-38de-4e39-9ad8-3da9ec111aa9-kube-api-access-kkgjs\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.970848 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-config-data\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.970891 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdb7l\" (UniqueName: \"kubernetes.io/projected/62331da1-a2da-4934-b0bd-8cee7d29bdfb-kube-api-access-sdb7l\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.970924 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-scripts\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.970958 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-config-data\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.970981 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5e45783-38de-4e39-9ad8-3da9ec111aa9-logs\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.971004 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-log-httpd\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.971024 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-scripts\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.971045 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-run-httpd\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.971067 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.971093 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-combined-ca-bundle\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.971182 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.975515 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7c859f6869-6v8sc"] Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.978974 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:54 crc kubenswrapper[4611]: I0929 12:58:54.990952 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9sjsz"] Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.023350 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7464cbd485-kspwh"] Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.034384 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c859f6869-6v8sc"] Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.080792 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-config\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.080866 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-scripts\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.080928 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-config-data\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.080949 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f5e45783-38de-4e39-9ad8-3da9ec111aa9-logs\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.080968 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-log-httpd\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081010 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-swift-storage-0\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081031 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv2sn\" (UniqueName: \"kubernetes.io/projected/f7342c0d-6bae-483a-9b47-8225216d4952-kube-api-access-qv2sn\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081050 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-scripts\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081077 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-svc\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081097 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-run-httpd\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081112 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081134 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-combined-ca-bundle\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081203 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-nb\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: 
\"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081239 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081280 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snlx8\" (UniqueName: \"kubernetes.io/projected/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-kube-api-access-snlx8\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081301 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkgjs\" (UniqueName: \"kubernetes.io/projected/f5e45783-38de-4e39-9ad8-3da9ec111aa9-kube-api-access-kkgjs\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081323 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-db-sync-config-data\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081402 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-combined-ca-bundle\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081432 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-config-data\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081472 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-sb\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.081507 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdb7l\" (UniqueName: \"kubernetes.io/projected/62331da1-a2da-4934-b0bd-8cee7d29bdfb-kube-api-access-sdb7l\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.082209 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-run-httpd\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 
12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.094392 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5e45783-38de-4e39-9ad8-3da9ec111aa9-logs\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.094589 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-log-httpd\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.098215 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-scripts\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.115506 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdb7l\" (UniqueName: \"kubernetes.io/projected/62331da1-a2da-4934-b0bd-8cee7d29bdfb-kube-api-access-sdb7l\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.116011 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-combined-ca-bundle\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.123282 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.124214 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkgjs\" (UniqueName: \"kubernetes.io/projected/f5e45783-38de-4e39-9ad8-3da9ec111aa9-kube-api-access-kkgjs\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.124358 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-config-data\") pod \"placement-db-sync-8jjnd\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.125550 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-config-data\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.129722 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-scripts\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 
12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.139008 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.180857 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8jjnd" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184163 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-swift-storage-0\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184200 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv2sn\" (UniqueName: \"kubernetes.io/projected/f7342c0d-6bae-483a-9b47-8225216d4952-kube-api-access-qv2sn\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184220 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-svc\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184256 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-scripts\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184275 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wzfb\" (UniqueName: \"kubernetes.io/projected/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-kube-api-access-5wzfb\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184323 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-nb\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184340 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-logs\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184360 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-config-data\") pod 
\"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184394 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snlx8\" (UniqueName: \"kubernetes.io/projected/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-kube-api-access-snlx8\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184416 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-db-sync-config-data\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184442 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-combined-ca-bundle\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184468 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-horizon-secret-key\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184484 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-sb\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.184513 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-config\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.185412 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-config\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.185880 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-swift-storage-0\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.186903 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-svc\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " 
pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.187572 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-nb\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.208670 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-db-sync-config-data\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.212821 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-combined-ca-bundle\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.213399 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-sb\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.222291 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snlx8\" (UniqueName: \"kubernetes.io/projected/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-kube-api-access-snlx8\") pod \"barbican-db-sync-9sjsz\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.250329 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv2sn\" (UniqueName: \"kubernetes.io/projected/f7342c0d-6bae-483a-9b47-8225216d4952-kube-api-access-qv2sn\") pod \"dnsmasq-dns-7464cbd485-kspwh\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.257054 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.285717 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-scripts\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.285765 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wzfb\" (UniqueName: \"kubernetes.io/projected/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-kube-api-access-5wzfb\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.285821 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-logs\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.285852 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-config-data\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.285938 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-horizon-secret-key\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.287269 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-logs\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.287276 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-scripts\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.288117 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-config-data\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.307292 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-horizon-secret-key\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.354956 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5wzfb\" (UniqueName: \"kubernetes.io/projected/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-kube-api-access-5wzfb\") pod \"horizon-7c859f6869-6v8sc\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.437111 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.457076 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-756756db9c-6rqbh"] Sep 29 12:58:55 crc kubenswrapper[4611]: W0929 12:58:55.490773 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5829b9aa_6ba0_4c1a_a768_ce085158bc4d.slice/crio-75236340158f868b026d36cb4711118ae456b73d22d729cdfe3bfdba003d1758 WatchSource:0}: Error finding container 75236340158f868b026d36cb4711118ae456b73d22d729cdfe3bfdba003d1758: Status 404 returned error can't find the container with id 75236340158f868b026d36cb4711118ae456b73d22d729cdfe3bfdba003d1758 Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.534912 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.612028 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-qkvkb"] Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.634077 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:58:55 crc kubenswrapper[4611]: W0929 12:58:55.660841 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67ca8632_d9b3_487c_971d_f16ad6646754.slice/crio-46fa0198fbdb976af808424ddec599952be00f29724659d9b975bf2d7b53ef0f WatchSource:0}: Error finding container 46fa0198fbdb976af808424ddec599952be00f29724659d9b975bf2d7b53ef0f: Status 404 returned error can't find the container with id 46fa0198fbdb976af808424ddec599952be00f29724659d9b975bf2d7b53ef0f Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.687899 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-ljr99" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.692478 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qkvkb" event={"ID":"67ca8632-d9b3-487c-971d-f16ad6646754","Type":"ContainerStarted","Data":"46fa0198fbdb976af808424ddec599952be00f29724659d9b975bf2d7b53ef0f"} Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.706227 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" event={"ID":"5829b9aa-6ba0-4c1a-a768-ce085158bc4d","Type":"ContainerStarted","Data":"75236340158f868b026d36cb4711118ae456b73d22d729cdfe3bfdba003d1758"} Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.727003 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ljr99" event={"ID":"88e8c098-763d-4a3b-b5e1-3f29d2b37845","Type":"ContainerDied","Data":"e3824b67b76d0666d1f3c1a21d0d60a5f1c33124aedcfe2d194a4c116df6723c"} Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.727040 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3824b67b76d0666d1f3c1a21d0d60a5f1c33124aedcfe2d194a4c116df6723c" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.727108 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-ljr99" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.842264 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-db-sync-config-data\") pod \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.844977 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmts9\" (UniqueName: \"kubernetes.io/projected/88e8c098-763d-4a3b-b5e1-3f29d2b37845-kube-api-access-pmts9\") pod \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.845111 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-config-data\") pod \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.845172 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-combined-ca-bundle\") pod \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\" (UID: \"88e8c098-763d-4a3b-b5e1-3f29d2b37845\") " Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.873408 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "88e8c098-763d-4a3b-b5e1-3f29d2b37845" (UID: "88e8c098-763d-4a3b-b5e1-3f29d2b37845"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.898284 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-flx2p"] Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.900508 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88e8c098-763d-4a3b-b5e1-3f29d2b37845-kube-api-access-pmts9" (OuterVolumeSpecName: "kube-api-access-pmts9") pod "88e8c098-763d-4a3b-b5e1-3f29d2b37845" (UID: "88e8c098-763d-4a3b-b5e1-3f29d2b37845"). InnerVolumeSpecName "kube-api-access-pmts9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.953596 4611 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.953648 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmts9\" (UniqueName: \"kubernetes.io/projected/88e8c098-763d-4a3b-b5e1-3f29d2b37845-kube-api-access-pmts9\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:55 crc kubenswrapper[4611]: I0929 12:58:55.963785 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88e8c098-763d-4a3b-b5e1-3f29d2b37845" (UID: "88e8c098-763d-4a3b-b5e1-3f29d2b37845"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.046493 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-config-data" (OuterVolumeSpecName: "config-data") pod "88e8c098-763d-4a3b-b5e1-3f29d2b37845" (UID: "88e8c098-763d-4a3b-b5e1-3f29d2b37845"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.053937 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c64d5f989-8zd7h"] Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.061598 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.061666 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88e8c098-763d-4a3b-b5e1-3f29d2b37845-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.218876 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7464cbd485-kspwh"] Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.255782 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b878cb6dc-jtk8m"] Sep 29 12:58:56 crc kubenswrapper[4611]: E0929 12:58:56.256653 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88e8c098-763d-4a3b-b5e1-3f29d2b37845" containerName="glance-db-sync" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.263807 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="88e8c098-763d-4a3b-b5e1-3f29d2b37845" containerName="glance-db-sync" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.264214 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="88e8c098-763d-4a3b-b5e1-3f29d2b37845" containerName="glance-db-sync" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.265154 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.273017 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-sb\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.307573 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-nb\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.307811 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-svc\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.307989 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcd78\" (UniqueName: \"kubernetes.io/projected/28aa84b8-5636-4f83-bc39-c62f20505d77-kube-api-access-fcd78\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.308116 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-swift-storage-0\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.308231 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-config\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.287426 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b878cb6dc-jtk8m"] Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.328409 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-nmkf9"] Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.409256 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-config\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.409339 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-sb\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " 
pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.409369 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-nb\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.409386 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-svc\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.409433 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcd78\" (UniqueName: \"kubernetes.io/projected/28aa84b8-5636-4f83-bc39-c62f20505d77-kube-api-access-fcd78\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.409466 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-swift-storage-0\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.411398 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-config\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.411842 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-swift-storage-0\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.411871 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-nb\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.413084 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-sb\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.419817 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-svc\") pod \"dnsmasq-dns-6b878cb6dc-jtk8m\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.597181 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m"
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.669227 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.728605 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-8jjnd"]
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.751582 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-flx2p" event={"ID":"81475e0c-543c-43f2-8a53-d9a1e7adcbad","Type":"ContainerStarted","Data":"490d8ae710cb50b606a9a1a23abf966ccd707159c7bc1cd7fbfdbade4c6fa3f5"}
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.752901 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-flx2p" event={"ID":"81475e0c-543c-43f2-8a53-d9a1e7adcbad","Type":"ContainerStarted","Data":"e1444cba63199ef4e454ad5427421abae3ad1bd441e6fe7d67cb9951842f8fc0"}
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.780148 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c64d5f989-8zd7h" event={"ID":"2cb973fe-c427-4569-896f-b348e869c251","Type":"ContainerStarted","Data":"119b5cdd5e36473fd4a74e3d34cd418192b194a99bd7c3db4b4951988eb81e56"}
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.794241 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9sjsz"]
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.796059 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nmkf9" event={"ID":"578c0d20-ef6e-43d0-8290-1ec794d9a0ee","Type":"ContainerStarted","Data":"6add29a6f8fd1d79d349f1d561daca67f72f71b8cb6a57b384a4c58048c829e5"}
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.801412 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-flx2p" podStartSLOduration=2.801388942 podStartE2EDuration="2.801388942s" podCreationTimestamp="2025-09-29 12:58:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:56.789139839 +0000 UTC m=+1123.680659445" watchObservedRunningTime="2025-09-29 12:58:56.801388942 +0000 UTC m=+1123.692908558"
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.808712 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qkvkb" event={"ID":"67ca8632-d9b3-487c-971d-f16ad6646754","Type":"ContainerStarted","Data":"21e5532b5f6c50aea86fdda44c584dc060e36c5f9eb62b1d3a38fe7ad7d97f03"}
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.836292 4611 generic.go:334] "Generic (PLEG): container finished" podID="5829b9aa-6ba0-4c1a-a768-ce085158bc4d" containerID="7314dc0a0a423a4ceff1fabc9cb8af82777c560119e5d5cd556b5156d7b6c8d2" exitCode=0
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.836336 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" event={"ID":"5829b9aa-6ba0-4c1a-a768-ce085158bc4d","Type":"ContainerDied","Data":"7314dc0a0a423a4ceff1fabc9cb8af82777c560119e5d5cd556b5156d7b6c8d2"}
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.876961 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-qkvkb" podStartSLOduration=3.876943641 podStartE2EDuration="3.876943641s" podCreationTimestamp="2025-09-29 12:58:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:58:56.847159362 +0000 UTC m=+1123.738678968" watchObservedRunningTime="2025-09-29 12:58:56.876943641 +0000 UTC m=+1123.768463247"
Sep 29 12:58:56 crc kubenswrapper[4611]: I0929 12:58:56.956167 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7464cbd485-kspwh"]
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.027132 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c859f6869-6v8sc"]
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.043743 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.045514 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.053611 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.054027 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hd98l"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.054221 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.100569 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.131874 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.131925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-scripts\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.131948 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.131987 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.132006 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-logs\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.132045 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc8fc\" (UniqueName: \"kubernetes.io/projected/8b0e5549-2a65-4008-bf6e-195ca30f794a-kube-api-access-hc8fc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.132067 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-config-data\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.235604 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236008 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-logs\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236057 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc8fc\" (UniqueName: \"kubernetes.io/projected/8b0e5549-2a65-4008-bf6e-195ca30f794a-kube-api-access-hc8fc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236087 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-config-data\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236210 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236250 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-scripts\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236295 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.236626 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.240961 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.241172 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-logs\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.252254 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-scripts\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.259033 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-config-data\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.274610 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc8fc\" (UniqueName: \"kubernetes.io/projected/8b0e5549-2a65-4008-bf6e-195ca30f794a-kube-api-access-hc8fc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.278069 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.334280 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " pod="openstack/glance-default-external-api-0"
kubenswrapper[4611]: I0929 12:58:57.507998 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.537220 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.565734 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.584248 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.585592 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.645199 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c64d5f989-8zd7h"] Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.676702 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.762152 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.780354 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-config\") pod \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.780409 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-swift-storage-0\") pod \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.780443 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-nb\") pod \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.780483 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-svc\") pod \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.780563 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cv6z\" (UniqueName: \"kubernetes.io/projected/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-kube-api-access-4cv6z\") pod \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.780584 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-sb\") pod \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\" (UID: \"5829b9aa-6ba0-4c1a-a768-ce085158bc4d\") " Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.799768 
4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.799842 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.799889 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sh7s\" (UniqueName: \"kubernetes.io/projected/8422e253-4d7d-40b8-a612-433136bcc72c-kube-api-access-9sh7s\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.799975 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-logs\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.800042 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.800240 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.800288 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.815905 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-kube-api-access-4cv6z" (OuterVolumeSpecName: "kube-api-access-4cv6z") pod "5829b9aa-6ba0-4c1a-a768-ce085158bc4d" (UID: "5829b9aa-6ba0-4c1a-a768-ce085158bc4d"). InnerVolumeSpecName "kube-api-access-4cv6z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.898640 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5829b9aa-6ba0-4c1a-a768-ce085158bc4d" (UID: "5829b9aa-6ba0-4c1a-a768-ce085158bc4d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.901995 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5829b9aa-6ba0-4c1a-a768-ce085158bc4d" (UID: "5829b9aa-6ba0-4c1a-a768-ce085158bc4d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.904562 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.904625 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.906971 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sh7s\" (UniqueName: \"kubernetes.io/projected/8422e253-4d7d-40b8-a612-433136bcc72c-kube-api-access-9sh7s\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.907049 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-logs\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.907104 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.907484 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.907531 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.908593 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.908616 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cv6z\" (UniqueName: \"kubernetes.io/projected/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-kube-api-access-4cv6z\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.928446 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.909268 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.908647 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-logs\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.922360 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.922800 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.921544 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.948500 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sh7s\" (UniqueName: \"kubernetes.io/projected/8422e253-4d7d-40b8-a612-433136bcc72c-kube-api-access-9sh7s\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.949803 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:57 crc 
kubenswrapper[4611]: I0929 12:58:57.964372 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5829b9aa-6ba0-4c1a-a768-ce085158bc4d" (UID: "5829b9aa-6ba0-4c1a-a768-ce085158bc4d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.964882 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-config" (OuterVolumeSpecName: "config") pod "5829b9aa-6ba0-4c1a-a768-ce085158bc4d" (UID: "5829b9aa-6ba0-4c1a-a768-ce085158bc4d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:57 crc kubenswrapper[4611]: I0929 12:58:57.971468 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5829b9aa-6ba0-4c1a-a768-ce085158bc4d" (UID: "5829b9aa-6ba0-4c1a-a768-ce085158bc4d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.033453 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.033488 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.033501 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5829b9aa-6ba0-4c1a-a768-ce085158bc4d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.056015 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.071316 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" podUID="f7342c0d-6bae-483a-9b47-8225216d4952" containerName="init" containerID="cri-o://16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a" gracePeriod=10 Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100292 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8jjnd" event={"ID":"f5e45783-38de-4e39-9ad8-3da9ec111aa9","Type":"ContainerStarted","Data":"f85cbdf987571209b4c44fb0f560a66d3476ada3803b3ef43b476a274b0d9188"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100333 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" event={"ID":"f7342c0d-6bae-483a-9b47-8225216d4952","Type":"ContainerStarted","Data":"16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100353 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7464cbd485-kspwh" event={"ID":"f7342c0d-6bae-483a-9b47-8225216d4952","Type":"ContainerStarted","Data":"3667e323a4760460cd7da553a43fbd544936aaabd9e95caee1b9e293d0cb0f55"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100363 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9sjsz" event={"ID":"adc1b4a7-82e6-42fa-9117-96db1ddcda7c","Type":"ContainerStarted","Data":"a26e72fe575737452162aa497c6288f64efe6b3c13eaf79bf2f44f58e9b6bc96"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100374 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-684c77c6c9-f5bwj"] Sep 29 12:58:58 crc kubenswrapper[4611]: E0929 12:58:58.100698 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5829b9aa-6ba0-4c1a-a768-ce085158bc4d" containerName="init" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100710 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5829b9aa-6ba0-4c1a-a768-ce085158bc4d" containerName="init" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.100920 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5829b9aa-6ba0-4c1a-a768-ce085158bc4d" containerName="init" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.101999 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b878cb6dc-jtk8m"] Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.102024 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-684c77c6c9-f5bwj"] Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.102042 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.102060 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.102726 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.107315 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.127877 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" event={"ID":"5829b9aa-6ba0-4c1a-a768-ce085158bc4d","Type":"ContainerDied","Data":"75236340158f868b026d36cb4711118ae456b73d22d729cdfe3bfdba003d1758"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.127927 4611 scope.go:117] "RemoveContainer" containerID="7314dc0a0a423a4ceff1fabc9cb8af82777c560119e5d5cd556b5156d7b6c8d2" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.128055 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-756756db9c-6rqbh" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.140856 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kwjq\" (UniqueName: \"kubernetes.io/projected/a7d299ff-579b-4d68-aea6-675e740c5f69-kube-api-access-2kwjq\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.140934 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-config-data\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.140982 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-scripts\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.141027 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7d299ff-579b-4d68-aea6-675e740c5f69-logs\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.141054 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a7d299ff-579b-4d68-aea6-675e740c5f69-horizon-secret-key\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.162462 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c859f6869-6v8sc" event={"ID":"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4","Type":"ContainerStarted","Data":"857ec0f405d0cd0ee61ef1cc64fb51db7e2ae282b3b23330a42728c88bf7d672"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.202843 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerStarted","Data":"d86e34fe65a7eeda96147527181b93fbfa27fa2e76f7849a132328ccd9762c3b"} Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.247324 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-config-data\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.247404 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-scripts\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.247449 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/a7d299ff-579b-4d68-aea6-675e740c5f69-logs\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.247479 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a7d299ff-579b-4d68-aea6-675e740c5f69-horizon-secret-key\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.247618 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kwjq\" (UniqueName: \"kubernetes.io/projected/a7d299ff-579b-4d68-aea6-675e740c5f69-kube-api-access-2kwjq\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.249413 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7d299ff-579b-4d68-aea6-675e740c5f69-logs\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.252245 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-config-data\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.258596 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-scripts\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.265211 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a7d299ff-579b-4d68-aea6-675e740c5f69-horizon-secret-key\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.277234 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kwjq\" (UniqueName: \"kubernetes.io/projected/a7d299ff-579b-4d68-aea6-675e740c5f69-kube-api-access-2kwjq\") pod \"horizon-684c77c6c9-f5bwj\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.438354 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.514548 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-756756db9c-6rqbh"] Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.522983 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-756756db9c-6rqbh"] Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.793523 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.883151 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv2sn\" (UniqueName: \"kubernetes.io/projected/f7342c0d-6bae-483a-9b47-8225216d4952-kube-api-access-qv2sn\") pod \"f7342c0d-6bae-483a-9b47-8225216d4952\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.883231 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-nb\") pod \"f7342c0d-6bae-483a-9b47-8225216d4952\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.883293 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-config\") pod \"f7342c0d-6bae-483a-9b47-8225216d4952\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.883317 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-sb\") pod \"f7342c0d-6bae-483a-9b47-8225216d4952\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.883347 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-svc\") pod \"f7342c0d-6bae-483a-9b47-8225216d4952\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.883369 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-swift-storage-0\") pod \"f7342c0d-6bae-483a-9b47-8225216d4952\" (UID: \"f7342c0d-6bae-483a-9b47-8225216d4952\") " Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.896515 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7342c0d-6bae-483a-9b47-8225216d4952-kube-api-access-qv2sn" (OuterVolumeSpecName: "kube-api-access-qv2sn") pod "f7342c0d-6bae-483a-9b47-8225216d4952" (UID: "f7342c0d-6bae-483a-9b47-8225216d4952"). InnerVolumeSpecName "kube-api-access-qv2sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.944679 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-config" (OuterVolumeSpecName: "config") pod "f7342c0d-6bae-483a-9b47-8225216d4952" (UID: "f7342c0d-6bae-483a-9b47-8225216d4952"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.969099 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f7342c0d-6bae-483a-9b47-8225216d4952" (UID: "f7342c0d-6bae-483a-9b47-8225216d4952"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.987762 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv2sn\" (UniqueName: \"kubernetes.io/projected/f7342c0d-6bae-483a-9b47-8225216d4952-kube-api-access-qv2sn\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.987789 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.987800 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:58 crc kubenswrapper[4611]: I0929 12:58:58.990688 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f7342c0d-6bae-483a-9b47-8225216d4952" (UID: "f7342c0d-6bae-483a-9b47-8225216d4952"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.000920 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f7342c0d-6bae-483a-9b47-8225216d4952" (UID: "f7342c0d-6bae-483a-9b47-8225216d4952"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.001777 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f7342c0d-6bae-483a-9b47-8225216d4952" (UID: "f7342c0d-6bae-483a-9b47-8225216d4952"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.090152 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.090200 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.090216 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7342c0d-6bae-483a-9b47-8225216d4952-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.168601 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.226787 4611 generic.go:334] "Generic (PLEG): container finished" podID="f7342c0d-6bae-483a-9b47-8225216d4952" containerID="16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a" exitCode=0 Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.227171 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" event={"ID":"f7342c0d-6bae-483a-9b47-8225216d4952","Type":"ContainerDied","Data":"16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a"} Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.227198 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" event={"ID":"f7342c0d-6bae-483a-9b47-8225216d4952","Type":"ContainerDied","Data":"3667e323a4760460cd7da553a43fbd544936aaabd9e95caee1b9e293d0cb0f55"} Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.227216 4611 scope.go:117] "RemoveContainer" containerID="16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.227308 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7464cbd485-kspwh" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.254693 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.308449 4611 generic.go:334] "Generic (PLEG): container finished" podID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerID="4e0fadbcc97158d47b8f11427708aecb4d7d031d411bb56cfb48ed3027bb5aeb" exitCode=0 Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.308511 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" event={"ID":"28aa84b8-5636-4f83-bc39-c62f20505d77","Type":"ContainerDied","Data":"4e0fadbcc97158d47b8f11427708aecb4d7d031d411bb56cfb48ed3027bb5aeb"} Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.308543 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" event={"ID":"28aa84b8-5636-4f83-bc39-c62f20505d77","Type":"ContainerStarted","Data":"6f79fa1b52efacc7821e888f47ed9c0bc33fb8dab6c0e67a6dd60812b0896a92"} Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.394373 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7464cbd485-kspwh"] Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.425174 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7464cbd485-kspwh"] Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.449332 4611 scope.go:117] "RemoveContainer" containerID="16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.454498 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-684c77c6c9-f5bwj"] Sep 29 12:58:59 crc kubenswrapper[4611]: E0929 12:58:59.454559 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a\": container with ID starting with 16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a not found: ID does not exist" containerID="16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.454607 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a"} err="failed to get container status \"16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a\": rpc error: code = NotFound desc = could not find container \"16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a\": container with ID starting with 16665a43dc912a298ed21c464e6644812a77ac0f50611bbde07e1df1b960a38a not found: ID does not exist" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.767265 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5829b9aa-6ba0-4c1a-a768-ce085158bc4d" path="/var/lib/kubelet/pods/5829b9aa-6ba0-4c1a-a768-ce085158bc4d/volumes" Sep 29 12:58:59 crc kubenswrapper[4611]: I0929 12:58:59.768983 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7342c0d-6bae-483a-9b47-8225216d4952" path="/var/lib/kubelet/pods/f7342c0d-6bae-483a-9b47-8225216d4952/volumes" Sep 29 12:59:00 crc kubenswrapper[4611]: I0929 12:59:00.365050 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-684c77c6c9-f5bwj" 
event={"ID":"a7d299ff-579b-4d68-aea6-675e740c5f69","Type":"ContainerStarted","Data":"9d356239a2ce34315c7fd9a9b7c3de39cd05897f761b0eb5414e55ac1769259b"} Sep 29 12:59:00 crc kubenswrapper[4611]: I0929 12:59:00.369868 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8422e253-4d7d-40b8-a612-433136bcc72c","Type":"ContainerStarted","Data":"99f9e397de94cd886729e0f3dbfad7ead1255ae5a801e3d12a22d9314d940ef6"} Sep 29 12:59:00 crc kubenswrapper[4611]: I0929 12:59:00.399235 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8b0e5549-2a65-4008-bf6e-195ca30f794a","Type":"ContainerStarted","Data":"7bf2d09d45d5308b55350bf2af3ed613797d56d328129e8b5d8fc6b043dc9700"} Sep 29 12:59:00 crc kubenswrapper[4611]: I0929 12:59:00.406693 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" event={"ID":"28aa84b8-5636-4f83-bc39-c62f20505d77","Type":"ContainerStarted","Data":"3b225203218ca9e7e7971a354a5cdc7764227140693510ffdbed658d847b0f42"} Sep 29 12:59:00 crc kubenswrapper[4611]: I0929 12:59:00.406886 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:59:00 crc kubenswrapper[4611]: I0929 12:59:00.439002 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" podStartSLOduration=4.438985795 podStartE2EDuration="4.438985795s" podCreationTimestamp="2025-09-29 12:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:00.427228506 +0000 UTC m=+1127.318748112" watchObservedRunningTime="2025-09-29 12:59:00.438985795 +0000 UTC m=+1127.330505401" Sep 29 12:59:01 crc kubenswrapper[4611]: I0929 12:59:01.437047 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8422e253-4d7d-40b8-a612-433136bcc72c","Type":"ContainerStarted","Data":"4ea898188c86a90de47be684c4db4ec7d64bc1a93786ea06b1e3a4993de7e37a"} Sep 29 12:59:01 crc kubenswrapper[4611]: I0929 12:59:01.443688 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8b0e5549-2a65-4008-bf6e-195ca30f794a","Type":"ContainerStarted","Data":"bbb1bce54b28fd3c8f245e2258c71bad04553da19d048c56c6c79fec534d1e13"} Sep 29 12:59:03 crc kubenswrapper[4611]: I0929 12:59:03.487505 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8b0e5549-2a65-4008-bf6e-195ca30f794a","Type":"ContainerStarted","Data":"4d950c12a63512a84c79c8f95ead50918914bddf69b70a98a68baab58793f419"} Sep 29 12:59:03 crc kubenswrapper[4611]: I0929 12:59:03.487763 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-httpd" containerID="cri-o://4d950c12a63512a84c79c8f95ead50918914bddf69b70a98a68baab58793f419" gracePeriod=30 Sep 29 12:59:03 crc kubenswrapper[4611]: I0929 12:59:03.488140 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-log" containerID="cri-o://bbb1bce54b28fd3c8f245e2258c71bad04553da19d048c56c6c79fec534d1e13" gracePeriod=30 Sep 29 12:59:03 crc kubenswrapper[4611]: 
I0929 12:59:03.786397 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.786378009 podStartE2EDuration="8.786378009s" podCreationTimestamp="2025-09-29 12:58:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:03.529259535 +0000 UTC m=+1130.420779141" watchObservedRunningTime="2025-09-29 12:59:03.786378009 +0000 UTC m=+1130.677897615" Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.509746 4611 generic.go:334] "Generic (PLEG): container finished" podID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerID="4d950c12a63512a84c79c8f95ead50918914bddf69b70a98a68baab58793f419" exitCode=0 Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.510056 4611 generic.go:334] "Generic (PLEG): container finished" podID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerID="bbb1bce54b28fd3c8f245e2258c71bad04553da19d048c56c6c79fec534d1e13" exitCode=143 Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.510101 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8b0e5549-2a65-4008-bf6e-195ca30f794a","Type":"ContainerDied","Data":"4d950c12a63512a84c79c8f95ead50918914bddf69b70a98a68baab58793f419"} Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.510139 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8b0e5549-2a65-4008-bf6e-195ca30f794a","Type":"ContainerDied","Data":"bbb1bce54b28fd3c8f245e2258c71bad04553da19d048c56c6c79fec534d1e13"} Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.513191 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8422e253-4d7d-40b8-a612-433136bcc72c","Type":"ContainerStarted","Data":"e03bc1fc0c6ce4dc97eceec069147b1d9f30b474c0d9907a0f9fcb3103b8d65d"} Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.513325 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-log" containerID="cri-o://4ea898188c86a90de47be684c4db4ec7d64bc1a93786ea06b1e3a4993de7e37a" gracePeriod=30 Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.513715 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-httpd" containerID="cri-o://e03bc1fc0c6ce4dc97eceec069147b1d9f30b474c0d9907a0f9fcb3103b8d65d" gracePeriod=30 Sep 29 12:59:04 crc kubenswrapper[4611]: I0929 12:59:04.580995 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.580979601 podStartE2EDuration="8.580979601s" podCreationTimestamp="2025-09-29 12:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:04.579004204 +0000 UTC m=+1131.470523810" watchObservedRunningTime="2025-09-29 12:59:04.580979601 +0000 UTC m=+1131.472499207" Sep 29 12:59:05 crc kubenswrapper[4611]: I0929 12:59:05.543564 4611 generic.go:334] "Generic (PLEG): container finished" podID="8422e253-4d7d-40b8-a612-433136bcc72c" containerID="e03bc1fc0c6ce4dc97eceec069147b1d9f30b474c0d9907a0f9fcb3103b8d65d" exitCode=0 Sep 29 12:59:05 crc 
kubenswrapper[4611]: I0929 12:59:05.543603 4611 generic.go:334] "Generic (PLEG): container finished" podID="8422e253-4d7d-40b8-a612-433136bcc72c" containerID="4ea898188c86a90de47be684c4db4ec7d64bc1a93786ea06b1e3a4993de7e37a" exitCode=143 Sep 29 12:59:05 crc kubenswrapper[4611]: I0929 12:59:05.543665 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8422e253-4d7d-40b8-a612-433136bcc72c","Type":"ContainerDied","Data":"e03bc1fc0c6ce4dc97eceec069147b1d9f30b474c0d9907a0f9fcb3103b8d65d"} Sep 29 12:59:05 crc kubenswrapper[4611]: I0929 12:59:05.543695 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8422e253-4d7d-40b8-a612-433136bcc72c","Type":"ContainerDied","Data":"4ea898188c86a90de47be684c4db4ec7d64bc1a93786ea06b1e3a4993de7e37a"} Sep 29 12:59:05 crc kubenswrapper[4611]: I0929 12:59:05.546928 4611 generic.go:334] "Generic (PLEG): container finished" podID="67ca8632-d9b3-487c-971d-f16ad6646754" containerID="21e5532b5f6c50aea86fdda44c584dc060e36c5f9eb62b1d3a38fe7ad7d97f03" exitCode=0 Sep 29 12:59:05 crc kubenswrapper[4611]: I0929 12:59:05.546965 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qkvkb" event={"ID":"67ca8632-d9b3-487c-971d-f16ad6646754","Type":"ContainerDied","Data":"21e5532b5f6c50aea86fdda44c584dc060e36c5f9eb62b1d3a38fe7ad7d97f03"} Sep 29 12:59:06 crc kubenswrapper[4611]: I0929 12:59:06.599837 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:59:06 crc kubenswrapper[4611]: I0929 12:59:06.673813 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5449cc85b9-rgp8v"] Sep 29 12:59:06 crc kubenswrapper[4611]: I0929 12:59:06.674368 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="dnsmasq-dns" containerID="cri-o://45beec9f037b8f8d5137a9276aea854ea3ce4bc7e5b0a0be47e058d6a1f01757" gracePeriod=10 Sep 29 12:59:07 crc kubenswrapper[4611]: I0929 12:59:07.565002 4611 generic.go:334] "Generic (PLEG): container finished" podID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerID="45beec9f037b8f8d5137a9276aea854ea3ce4bc7e5b0a0be47e058d6a1f01757" exitCode=0 Sep 29 12:59:07 crc kubenswrapper[4611]: I0929 12:59:07.565042 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" event={"ID":"f994bbc6-ef59-4f7c-9c0b-709d712b5925","Type":"ContainerDied","Data":"45beec9f037b8f8d5137a9276aea854ea3ce4bc7e5b0a0be47e058d6a1f01757"} Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.087470 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c859f6869-6v8sc"] Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.119377 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5c9489c674-t4fp8"] Sep 29 12:59:08 crc kubenswrapper[4611]: E0929 12:59:08.119750 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7342c0d-6bae-483a-9b47-8225216d4952" containerName="init" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.119766 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7342c0d-6bae-483a-9b47-8225216d4952" containerName="init" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.119942 4611 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f7342c0d-6bae-483a-9b47-8225216d4952" containerName="init" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.125288 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.127956 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.145318 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c9489c674-t4fp8"] Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.216903 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-684c77c6c9-f5bwj"] Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.232667 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6f7ffcb4c4-cz4zj"] Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.234578 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.255523 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f7ffcb4c4-cz4zj"] Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264658 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a263e8a7-9837-4368-aa41-01bd60fabd6d-logs\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264731 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-scripts\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264799 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-combined-ca-bundle\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264852 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-tls-certs\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264913 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-secret-key\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264940 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz8sk\" (UniqueName: \"kubernetes.io/projected/a263e8a7-9837-4368-aa41-01bd60fabd6d-kube-api-access-cz8sk\") pod \"horizon-5c9489c674-t4fp8\" (UID: 
\"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.264966 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-config-data\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367199 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-tls-certs\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367279 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-config-data\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367301 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-tls-certs\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367324 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77kgc\" (UniqueName: \"kubernetes.io/projected/66aeb2b0-254f-4c1c-b565-438e9f754366-kube-api-access-77kgc\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367367 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-secret-key\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367407 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-secret-key\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367439 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz8sk\" (UniqueName: \"kubernetes.io/projected/a263e8a7-9837-4368-aa41-01bd60fabd6d-kube-api-access-cz8sk\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367457 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66aeb2b0-254f-4c1c-b565-438e9f754366-logs\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: 
\"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367475 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-config-data\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367517 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-combined-ca-bundle\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367535 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-scripts\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367579 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a263e8a7-9837-4368-aa41-01bd60fabd6d-logs\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367608 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-scripts\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.367678 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-combined-ca-bundle\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.368466 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a263e8a7-9837-4368-aa41-01bd60fabd6d-logs\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.369275 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-config-data\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.369719 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-scripts\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.373345 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-secret-key\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.374379 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-combined-ca-bundle\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.388236 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-tls-certs\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.396950 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz8sk\" (UniqueName: \"kubernetes.io/projected/a263e8a7-9837-4368-aa41-01bd60fabd6d-kube-api-access-cz8sk\") pod \"horizon-5c9489c674-t4fp8\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") " pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.465174 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.468535 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-tls-certs\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.468575 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-config-data\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.468599 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77kgc\" (UniqueName: \"kubernetes.io/projected/66aeb2b0-254f-4c1c-b565-438e9f754366-kube-api-access-77kgc\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.468647 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-secret-key\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.468670 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66aeb2b0-254f-4c1c-b565-438e9f754366-logs\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 
12:59:08.468705 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-combined-ca-bundle\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.468728 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-scripts\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.469552 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66aeb2b0-254f-4c1c-b565-438e9f754366-logs\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.469704 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-scripts\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.470432 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-config-data\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.473196 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-tls-certs\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.473634 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-combined-ca-bundle\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.479365 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-secret-key\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.488703 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77kgc\" (UniqueName: \"kubernetes.io/projected/66aeb2b0-254f-4c1c-b565-438e9f754366-kube-api-access-77kgc\") pod \"horizon-6f7ffcb4c4-cz4zj\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:08 crc kubenswrapper[4611]: I0929 12:59:08.561192 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:10 crc kubenswrapper[4611]: I0929 12:59:10.884989 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Sep 29 12:59:13 crc kubenswrapper[4611]: E0929 12:59:13.573314 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-placement-api:current-tested" Sep 29 12:59:13 crc kubenswrapper[4611]: E0929 12:59:13.573955 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-placement-api:current-tested" Sep 29 12:59:13 crc kubenswrapper[4611]: E0929 12:59:13.574323 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-placement-api:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkgjs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-8jjnd_openstack(f5e45783-38de-4e39-9ad8-3da9ec111aa9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:59:13 crc kubenswrapper[4611]: E0929 12:59:13.575837 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-8jjnd" podUID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" Sep 29 12:59:13 crc kubenswrapper[4611]: E0929 12:59:13.631018 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-placement-api:current-tested\\\"\"" pod="openstack/placement-db-sync-8jjnd" podUID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" Sep 29 12:59:15 crc kubenswrapper[4611]: I0929 12:59:15.885613 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.726413 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.726890 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.727008 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb5h57ch69h576h5fdhb7h546h645h5f6h677h68dh677h5d4h59fh679h54bhc7h5f4h546hddh77h5d9hc4h689h554h5dh647hdbhcbh689h687h66fq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2kwjq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
horizon-684c77c6c9-f5bwj_openstack(a7d299ff-579b-4d68-aea6-675e740c5f69): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.729887 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested\\\"\"]" pod="openstack/horizon-684c77c6c9-f5bwj" podUID="a7d299ff-579b-4d68-aea6-675e740c5f69" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.734327 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.734397 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 12:59:18.734570 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf5h58bh65dhbfh544h5c7h546h5dbh5d5h65dh89h5f7h594h658h77h549h569h56h67h665h5bdh5ffh564hcch584h99h97h546hd7hb7h554h558q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zg869,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6c64d5f989-8zd7h_openstack(2cb973fe-c427-4569-896f-b348e869c251): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:59:18 crc kubenswrapper[4611]: E0929 
12:59:18.739592 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested\\\"\"]" pod="openstack/horizon-6c64d5f989-8zd7h" podUID="2cb973fe-c427-4569-896f-b348e869c251" Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.805212 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.976239 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-credential-keys\") pod \"67ca8632-d9b3-487c-971d-f16ad6646754\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.976295 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qwgp\" (UniqueName: \"kubernetes.io/projected/67ca8632-d9b3-487c-971d-f16ad6646754-kube-api-access-8qwgp\") pod \"67ca8632-d9b3-487c-971d-f16ad6646754\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.976453 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-combined-ca-bundle\") pod \"67ca8632-d9b3-487c-971d-f16ad6646754\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.976476 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-fernet-keys\") pod \"67ca8632-d9b3-487c-971d-f16ad6646754\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.976522 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-scripts\") pod \"67ca8632-d9b3-487c-971d-f16ad6646754\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.976573 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-config-data\") pod \"67ca8632-d9b3-487c-971d-f16ad6646754\" (UID: \"67ca8632-d9b3-487c-971d-f16ad6646754\") " Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.985064 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "67ca8632-d9b3-487c-971d-f16ad6646754" (UID: "67ca8632-d9b3-487c-971d-f16ad6646754"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.985299 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-scripts" (OuterVolumeSpecName: "scripts") pod "67ca8632-d9b3-487c-971d-f16ad6646754" (UID: "67ca8632-d9b3-487c-971d-f16ad6646754"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.988411 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67ca8632-d9b3-487c-971d-f16ad6646754-kube-api-access-8qwgp" (OuterVolumeSpecName: "kube-api-access-8qwgp") pod "67ca8632-d9b3-487c-971d-f16ad6646754" (UID: "67ca8632-d9b3-487c-971d-f16ad6646754"). InnerVolumeSpecName "kube-api-access-8qwgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:18 crc kubenswrapper[4611]: I0929 12:59:18.999293 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "67ca8632-d9b3-487c-971d-f16ad6646754" (UID: "67ca8632-d9b3-487c-971d-f16ad6646754"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.004589 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67ca8632-d9b3-487c-971d-f16ad6646754" (UID: "67ca8632-d9b3-487c-971d-f16ad6646754"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.025232 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-config-data" (OuterVolumeSpecName: "config-data") pod "67ca8632-d9b3-487c-971d-f16ad6646754" (UID: "67ca8632-d9b3-487c-971d-f16ad6646754"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.078600 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.078655 4611 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.078674 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.078685 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.078695 4611 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/67ca8632-d9b3-487c-971d-f16ad6646754-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.078707 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qwgp\" (UniqueName: \"kubernetes.io/projected/67ca8632-d9b3-487c-971d-f16ad6646754-kube-api-access-8qwgp\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.650650 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current-tested" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.650997 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current-tested" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.651103 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current-tested,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-snlx8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-9sjsz_openstack(adc1b4a7-82e6-42fa-9117-96db1ddcda7c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.651839 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.651891 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.652023 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n85h5c6h56h54bh9fh54fh67dh578hcdhb8hd8hdbh64bhddh569h564hdfh6ch5fhcfh66bh656h567h6h57ch555h676h7bh576h657h74hfbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5wzfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7c859f6869-6v8sc_openstack(5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.653302 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-9sjsz" podUID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.654737 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-horizon:current-tested\\\"\"]" pod="openstack/horizon-7c859f6869-6v8sc" podUID="5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.680913 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qkvkb" event={"ID":"67ca8632-d9b3-487c-971d-f16ad6646754","Type":"ContainerDied","Data":"46fa0198fbdb976af808424ddec599952be00f29724659d9b975bf2d7b53ef0f"} Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.680958 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46fa0198fbdb976af808424ddec599952be00f29724659d9b975bf2d7b53ef0f" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.681022 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-qkvkb" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.688211 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8b0e5549-2a65-4008-bf6e-195ca30f794a","Type":"ContainerDied","Data":"7bf2d09d45d5308b55350bf2af3ed613797d56d328129e8b5d8fc6b043dc9700"} Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.688262 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bf2d09d45d5308b55350bf2af3ed613797d56d328129e8b5d8fc6b043dc9700" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.694496 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8422e253-4d7d-40b8-a612-433136bcc72c","Type":"ContainerDied","Data":"99f9e397de94cd886729e0f3dbfad7ead1255ae5a801e3d12a22d9314d940ef6"} Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.694534 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99f9e397de94cd886729e0f3dbfad7ead1255ae5a801e3d12a22d9314d940ef6" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.713443 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" event={"ID":"f994bbc6-ef59-4f7c-9c0b-709d712b5925","Type":"ContainerDied","Data":"191ba79a83f97ae2b9b355dd918620a13ec26db0958a01f623a1de4ecb2a864d"} Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.713480 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="191ba79a83f97ae2b9b355dd918620a13ec26db0958a01f623a1de4ecb2a864d" Sep 29 12:59:19 crc kubenswrapper[4611]: E0929 12:59:19.716598 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current-tested\\\"\"" pod="openstack/barbican-db-sync-9sjsz" podUID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.783380 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.790844 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckf5p\" (UniqueName: \"kubernetes.io/projected/f994bbc6-ef59-4f7c-9c0b-709d712b5925-kube-api-access-ckf5p\") pod \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.790901 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-nb\") pod \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.790985 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-svc\") pod \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.791034 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-sb\") pod \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.791109 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-swift-storage-0\") pod \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.791167 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-config\") pod \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\" (UID: \"f994bbc6-ef59-4f7c-9c0b-709d712b5925\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.798287 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f994bbc6-ef59-4f7c-9c0b-709d712b5925-kube-api-access-ckf5p" (OuterVolumeSpecName: "kube-api-access-ckf5p") pod "f994bbc6-ef59-4f7c-9c0b-709d712b5925" (UID: "f994bbc6-ef59-4f7c-9c0b-709d712b5925"). InnerVolumeSpecName "kube-api-access-ckf5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.813425 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894047 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-logs\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894087 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-scripts\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894121 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-combined-ca-bundle\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894197 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-config-data\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894218 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894243 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc8fc\" (UniqueName: \"kubernetes.io/projected/8b0e5549-2a65-4008-bf6e-195ca30f794a-kube-api-access-hc8fc\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894289 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-httpd-run\") pod \"8b0e5549-2a65-4008-bf6e-195ca30f794a\" (UID: \"8b0e5549-2a65-4008-bf6e-195ca30f794a\") " Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894547 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckf5p\" (UniqueName: \"kubernetes.io/projected/f994bbc6-ef59-4f7c-9c0b-709d712b5925-kube-api-access-ckf5p\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.894961 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.897195 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-logs" (OuterVolumeSpecName: "logs") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.921812 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 12:59:19 crc kubenswrapper[4611]: I0929 12:59:19.951681 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b0e5549-2a65-4008-bf6e-195ca30f794a-kube-api-access-hc8fc" (OuterVolumeSpecName: "kube-api-access-hc8fc") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "kube-api-access-hc8fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.003160 4611 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.003494 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b0e5549-2a65-4008-bf6e-195ca30f794a-logs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.003588 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.003603 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc8fc\" (UniqueName: \"kubernetes.io/projected/8b0e5549-2a65-4008-bf6e-195ca30f794a-kube-api-access-hc8fc\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.004565 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-qkvkb"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.006231 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-scripts" (OuterVolumeSpecName: "scripts") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.007941 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f994bbc6-ef59-4f7c-9c0b-709d712b5925" (UID: "f994bbc6-ef59-4f7c-9c0b-709d712b5925"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.010352 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f994bbc6-ef59-4f7c-9c0b-709d712b5925" (UID: "f994bbc6-ef59-4f7c-9c0b-709d712b5925"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.014513 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.027416 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-qkvkb"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.038485 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-config" (OuterVolumeSpecName: "config") pod "f994bbc6-ef59-4f7c-9c0b-709d712b5925" (UID: "f994bbc6-ef59-4f7c-9c0b-709d712b5925"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.079052 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-config-data" (OuterVolumeSpecName: "config-data") pod "8b0e5549-2a65-4008-bf6e-195ca30f794a" (UID: "8b0e5549-2a65-4008-bf6e-195ca30f794a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.080388 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-w8cp9"] Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.081100 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-httpd" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081120 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-httpd" Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.081145 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="dnsmasq-dns" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081151 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="dnsmasq-dns" Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.081172 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="init" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081178 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="init" Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.081199 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-log" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081206 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-log" Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.081229 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67ca8632-d9b3-487c-971d-f16ad6646754" containerName="keystone-bootstrap" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081236 4611 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="67ca8632-d9b3-487c-971d-f16ad6646754" containerName="keystone-bootstrap" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081551 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" containerName="dnsmasq-dns" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081571 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="67ca8632-d9b3-487c-971d-f16ad6646754" containerName="keystone-bootstrap" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081593 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-httpd" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.081605 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" containerName="glance-log" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.082518 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.086826 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.087260 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cs8gt" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.087641 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.088573 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.088923 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w8cp9"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.094210 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112077 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-scripts\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112407 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-httpd-run\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112466 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-logs\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112542 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112596 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-9sh7s\" (UniqueName: \"kubernetes.io/projected/8422e253-4d7d-40b8-a612-433136bcc72c-kube-api-access-9sh7s\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112650 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-combined-ca-bundle\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.112712 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-config-data\") pod \"8422e253-4d7d-40b8-a612-433136bcc72c\" (UID: \"8422e253-4d7d-40b8-a612-433136bcc72c\") " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.113099 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-fernet-keys\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.113164 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-config-data\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.113234 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-credential-keys\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114448 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvqc5\" (UniqueName: \"kubernetes.io/projected/d1ff376d-1d04-41b1-a417-6de011ef3054-kube-api-access-bvqc5\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114533 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-scripts\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114583 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-combined-ca-bundle\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114751 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-swift-storage-0\") 
on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114774 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114789 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114803 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114818 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.114835 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b0e5549-2a65-4008-bf6e-195ca30f794a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.115370 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.116608 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-logs" (OuterVolumeSpecName: "logs") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.129646 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8422e253-4d7d-40b8-a612-433136bcc72c-kube-api-access-9sh7s" (OuterVolumeSpecName: "kube-api-access-9sh7s") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "kube-api-access-9sh7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.134591 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f994bbc6-ef59-4f7c-9c0b-709d712b5925" (UID: "f994bbc6-ef59-4f7c-9c0b-709d712b5925"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.159466 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-scripts" (OuterVolumeSpecName: "scripts") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.161028 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.172398 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.174241 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.181132 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f994bbc6-ef59-4f7c-9c0b-709d712b5925" (UID: "f994bbc6-ef59-4f7c-9c0b-709d712b5925"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215444 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-config-data\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215515 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-credential-keys\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215565 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvqc5\" (UniqueName: \"kubernetes.io/projected/d1ff376d-1d04-41b1-a417-6de011ef3054-kube-api-access-bvqc5\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215602 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-scripts\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215643 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-combined-ca-bundle\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215689 4611 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-fernet-keys\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215751 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sh7s\" (UniqueName: \"kubernetes.io/projected/8422e253-4d7d-40b8-a612-433136bcc72c-kube-api-access-9sh7s\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215763 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215774 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215784 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f994bbc6-ef59-4f7c-9c0b-709d712b5925-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215793 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215802 4611 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215811 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8422e253-4d7d-40b8-a612-433136bcc72c-logs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215822 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.215845 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.219405 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-scripts\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.220230 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-fernet-keys\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.220895 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-config-data\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.222474 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-combined-ca-bundle\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.224448 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-credential-keys\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.240615 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvqc5\" (UniqueName: \"kubernetes.io/projected/d1ff376d-1d04-41b1-a417-6de011ef3054-kube-api-access-bvqc5\") pod \"keystone-bootstrap-w8cp9\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.257472 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.258908 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-config-data" (OuterVolumeSpecName: "config-data") pod "8422e253-4d7d-40b8-a612-433136bcc72c" (UID: "8422e253-4d7d-40b8-a612-433136bcc72c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.316765 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.322401 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8422e253-4d7d-40b8-a612-433136bcc72c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.468177 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.723795 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.724618 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.724704 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5449cc85b9-rgp8v" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.787396 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5449cc85b9-rgp8v"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.800857 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5449cc85b9-rgp8v"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.813549 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.834602 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.849722 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.861796 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.874069 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.874840 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-httpd" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.874862 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-httpd" Sep 29 12:59:20 crc kubenswrapper[4611]: E0929 12:59:20.874877 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-log" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.874884 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-log" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.875157 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-httpd" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.875191 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" containerName="glance-log" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.876756 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.879099 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.879338 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hd98l" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.879636 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.890610 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.890863 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.893036 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.897184 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.897384 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.902403 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:59:20 crc kubenswrapper[4611]: I0929 12:59:20.911508 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.034985 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035032 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035083 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035117 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p79d\" (UniqueName: \"kubernetes.io/projected/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-kube-api-access-6p79d\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035147 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035178 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035349 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: 
\"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035404 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhf76\" (UniqueName: \"kubernetes.io/projected/51a74707-06e6-48d9-8636-a921a4a559e6-kube-api-access-fhf76\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035456 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-scripts\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035493 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-config-data\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035528 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035549 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-logs\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035589 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035705 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035763 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.035819 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.136955 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p79d\" (UniqueName: \"kubernetes.io/projected/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-kube-api-access-6p79d\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137007 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137035 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137066 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137093 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhf76\" (UniqueName: \"kubernetes.io/projected/51a74707-06e6-48d9-8636-a921a4a559e6-kube-api-access-fhf76\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137120 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-scripts\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137142 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-config-data\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137163 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137179 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-logs\") pod \"glance-default-external-api-0\" (UID: 
\"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137202 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137223 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137255 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137280 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137304 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137326 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.137363 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.138505 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.138181 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.139043 
4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.139257 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-logs\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.139532 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.147012 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.158946 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.165899 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p79d\" (UniqueName: \"kubernetes.io/projected/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-kube-api-access-6p79d\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.175248 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-scripts\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.176138 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.179416 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.180848 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-config-data\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.181058 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.181718 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.194046 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.211681 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.230024 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.247455 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhf76\" (UniqueName: \"kubernetes.io/projected/51a74707-06e6-48d9-8636-a921a4a559e6-kube-api-access-fhf76\") pod \"glance-default-internal-api-0\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.507257 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.535983 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.747751 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67ca8632-d9b3-487c-971d-f16ad6646754" path="/var/lib/kubelet/pods/67ca8632-d9b3-487c-971d-f16ad6646754/volumes" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.748320 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8422e253-4d7d-40b8-a612-433136bcc72c" path="/var/lib/kubelet/pods/8422e253-4d7d-40b8-a612-433136bcc72c/volumes" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.749043 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b0e5549-2a65-4008-bf6e-195ca30f794a" path="/var/lib/kubelet/pods/8b0e5549-2a65-4008-bf6e-195ca30f794a/volumes" Sep 29 12:59:21 crc kubenswrapper[4611]: I0929 12:59:21.750210 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f994bbc6-ef59-4f7c-9c0b-709d712b5925" path="/var/lib/kubelet/pods/f994bbc6-ef59-4f7c-9c0b-709d712b5925/volumes" Sep 29 12:59:25 crc kubenswrapper[4611]: I0929 12:59:25.771579 4611 generic.go:334] "Generic (PLEG): container finished" podID="81475e0c-543c-43f2-8a53-d9a1e7adcbad" containerID="490d8ae710cb50b606a9a1a23abf966ccd707159c7bc1cd7fbfdbade4c6fa3f5" exitCode=0 Sep 29 12:59:25 crc kubenswrapper[4611]: I0929 12:59:25.771654 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-flx2p" event={"ID":"81475e0c-543c-43f2-8a53-d9a1e7adcbad","Type":"ContainerDied","Data":"490d8ae710cb50b606a9a1a23abf966ccd707159c7bc1cd7fbfdbade4c6fa3f5"} Sep 29 12:59:28 crc kubenswrapper[4611]: I0929 12:59:28.360216 4611 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod5829b9aa-6ba0-4c1a-a768-ce085158bc4d"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod5829b9aa-6ba0-4c1a-a768-ce085158bc4d] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5829b9aa_6ba0_4c1a_a768_ce085158bc4d.slice" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.004909 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.012582 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.017077 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.037249 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-flx2p" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130507 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cb973fe-c427-4569-896f-b348e869c251-logs\") pod \"2cb973fe-c427-4569-896f-b348e869c251\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130654 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kwjq\" (UniqueName: \"kubernetes.io/projected/a7d299ff-579b-4d68-aea6-675e740c5f69-kube-api-access-2kwjq\") pod \"a7d299ff-579b-4d68-aea6-675e740c5f69\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130680 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cb973fe-c427-4569-896f-b348e869c251-horizon-secret-key\") pod \"2cb973fe-c427-4569-896f-b348e869c251\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130732 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-scripts\") pod \"a7d299ff-579b-4d68-aea6-675e740c5f69\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130777 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a7d299ff-579b-4d68-aea6-675e740c5f69-horizon-secret-key\") pod \"a7d299ff-579b-4d68-aea6-675e740c5f69\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130795 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wzfb\" (UniqueName: \"kubernetes.io/projected/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-kube-api-access-5wzfb\") pod \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130839 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-horizon-secret-key\") pod \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130871 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-scripts\") pod \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130891 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7d299ff-579b-4d68-aea6-675e740c5f69-logs\") pod \"a7d299ff-579b-4d68-aea6-675e740c5f69\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130887 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cb973fe-c427-4569-896f-b348e869c251-logs" (OuterVolumeSpecName: "logs") pod "2cb973fe-c427-4569-896f-b348e869c251" (UID: "2cb973fe-c427-4569-896f-b348e869c251"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130909 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg869\" (UniqueName: \"kubernetes.io/projected/2cb973fe-c427-4569-896f-b348e869c251-kube-api-access-zg869\") pod \"2cb973fe-c427-4569-896f-b348e869c251\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130940 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-scripts\") pod \"2cb973fe-c427-4569-896f-b348e869c251\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130961 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chd4h\" (UniqueName: \"kubernetes.io/projected/81475e0c-543c-43f2-8a53-d9a1e7adcbad-kube-api-access-chd4h\") pod \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.130988 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-config-data\") pod \"a7d299ff-579b-4d68-aea6-675e740c5f69\" (UID: \"a7d299ff-579b-4d68-aea6-675e740c5f69\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131007 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-config-data\") pod \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131028 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-combined-ca-bundle\") pod \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131050 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-logs\") pod \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\" (UID: \"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131092 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-config\") pod \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\" (UID: \"81475e0c-543c-43f2-8a53-d9a1e7adcbad\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131109 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-config-data\") pod \"2cb973fe-c427-4569-896f-b348e869c251\" (UID: \"2cb973fe-c427-4569-896f-b348e869c251\") " Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131480 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d299ff-579b-4d68-aea6-675e740c5f69-logs" (OuterVolumeSpecName: "logs") pod "a7d299ff-579b-4d68-aea6-675e740c5f69" (UID: 
"a7d299ff-579b-4d68-aea6-675e740c5f69"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131504 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cb973fe-c427-4569-896f-b348e869c251-logs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.131979 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-scripts" (OuterVolumeSpecName: "scripts") pod "a7d299ff-579b-4d68-aea6-675e740c5f69" (UID: "a7d299ff-579b-4d68-aea6-675e740c5f69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.132469 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-config-data" (OuterVolumeSpecName: "config-data") pod "2cb973fe-c427-4569-896f-b348e869c251" (UID: "2cb973fe-c427-4569-896f-b348e869c251"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.132791 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-scripts" (OuterVolumeSpecName: "scripts") pod "2cb973fe-c427-4569-896f-b348e869c251" (UID: "2cb973fe-c427-4569-896f-b348e869c251"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.133291 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-config-data" (OuterVolumeSpecName: "config-data") pod "a7d299ff-579b-4d68-aea6-675e740c5f69" (UID: "a7d299ff-579b-4d68-aea6-675e740c5f69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.134149 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-logs" (OuterVolumeSpecName: "logs") pod "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" (UID: "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.135555 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-scripts" (OuterVolumeSpecName: "scripts") pod "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" (UID: "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.136688 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-config-data" (OuterVolumeSpecName: "config-data") pod "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" (UID: "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.137080 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d299ff-579b-4d68-aea6-675e740c5f69-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a7d299ff-579b-4d68-aea6-675e740c5f69" (UID: "a7d299ff-579b-4d68-aea6-675e740c5f69"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.137785 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" (UID: "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.140175 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-kube-api-access-5wzfb" (OuterVolumeSpecName: "kube-api-access-5wzfb") pod "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" (UID: "5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4"). InnerVolumeSpecName "kube-api-access-5wzfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.140274 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7d299ff-579b-4d68-aea6-675e740c5f69-kube-api-access-2kwjq" (OuterVolumeSpecName: "kube-api-access-2kwjq") pod "a7d299ff-579b-4d68-aea6-675e740c5f69" (UID: "a7d299ff-579b-4d68-aea6-675e740c5f69"). InnerVolumeSpecName "kube-api-access-2kwjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.140849 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81475e0c-543c-43f2-8a53-d9a1e7adcbad-kube-api-access-chd4h" (OuterVolumeSpecName: "kube-api-access-chd4h") pod "81475e0c-543c-43f2-8a53-d9a1e7adcbad" (UID: "81475e0c-543c-43f2-8a53-d9a1e7adcbad"). InnerVolumeSpecName "kube-api-access-chd4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.142992 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cb973fe-c427-4569-896f-b348e869c251-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "2cb973fe-c427-4569-896f-b348e869c251" (UID: "2cb973fe-c427-4569-896f-b348e869c251"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.147340 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cb973fe-c427-4569-896f-b348e869c251-kube-api-access-zg869" (OuterVolumeSpecName: "kube-api-access-zg869") pod "2cb973fe-c427-4569-896f-b348e869c251" (UID: "2cb973fe-c427-4569-896f-b348e869c251"). InnerVolumeSpecName "kube-api-access-zg869". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.161261 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-config" (OuterVolumeSpecName: "config") pod "81475e0c-543c-43f2-8a53-d9a1e7adcbad" (UID: "81475e0c-543c-43f2-8a53-d9a1e7adcbad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.163698 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81475e0c-543c-43f2-8a53-d9a1e7adcbad" (UID: "81475e0c-543c-43f2-8a53-d9a1e7adcbad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233324 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kwjq\" (UniqueName: \"kubernetes.io/projected/a7d299ff-579b-4d68-aea6-675e740c5f69-kube-api-access-2kwjq\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233366 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cb973fe-c427-4569-896f-b348e869c251-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233380 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233391 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a7d299ff-579b-4d68-aea6-675e740c5f69-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233405 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wzfb\" (UniqueName: \"kubernetes.io/projected/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-kube-api-access-5wzfb\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233416 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233426 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233437 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7d299ff-579b-4d68-aea6-675e740c5f69-logs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233449 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg869\" (UniqueName: \"kubernetes.io/projected/2cb973fe-c427-4569-896f-b348e869c251-kube-api-access-zg869\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233461 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233473 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chd4h\" (UniqueName: \"kubernetes.io/projected/81475e0c-543c-43f2-8a53-d9a1e7adcbad-kube-api-access-chd4h\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233486 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a7d299ff-579b-4d68-aea6-675e740c5f69-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233498 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233509 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233519 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4-logs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233529 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/81475e0c-543c-43f2-8a53-d9a1e7adcbad-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.233539 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cb973fe-c427-4569-896f-b348e869c251-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.848536 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-flx2p" event={"ID":"81475e0c-543c-43f2-8a53-d9a1e7adcbad","Type":"ContainerDied","Data":"e1444cba63199ef4e454ad5427421abae3ad1bd441e6fe7d67cb9951842f8fc0"} Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.848608 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1444cba63199ef4e454ad5427421abae3ad1bd441e6fe7d67cb9951842f8fc0" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.848616 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-flx2p" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.849865 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c64d5f989-8zd7h" event={"ID":"2cb973fe-c427-4569-896f-b348e869c251","Type":"ContainerDied","Data":"119b5cdd5e36473fd4a74e3d34cd418192b194a99bd7c3db4b4951988eb81e56"} Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.849962 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c64d5f989-8zd7h" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.855079 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-684c77c6c9-f5bwj" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.855076 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-684c77c6c9-f5bwj" event={"ID":"a7d299ff-579b-4d68-aea6-675e740c5f69","Type":"ContainerDied","Data":"9d356239a2ce34315c7fd9a9b7c3de39cd05897f761b0eb5414e55ac1769259b"} Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.856849 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c859f6869-6v8sc" event={"ID":"5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4","Type":"ContainerDied","Data":"857ec0f405d0cd0ee61ef1cc64fb51db7e2ae282b3b23330a42728c88bf7d672"} Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.856935 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c859f6869-6v8sc" Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.905764 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c64d5f989-8zd7h"] Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.933240 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6c64d5f989-8zd7h"] Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.965394 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-684c77c6c9-f5bwj"] Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.972545 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-684c77c6c9-f5bwj"] Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.988469 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c859f6869-6v8sc"] Sep 29 12:59:31 crc kubenswrapper[4611]: I0929 12:59:31.995263 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7c859f6869-6v8sc"] Sep 29 12:59:32 crc kubenswrapper[4611]: E0929 12:59:32.261602 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current-tested" Sep 29 12:59:32 crc kubenswrapper[4611]: E0929 12:59:32.261843 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current-tested" Sep 29 12:59:32 crc kubenswrapper[4611]: E0929 12:59:32.261954 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7wmdr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-nmkf9_openstack(578c0d20-ef6e-43d0-8290-1ec794d9a0ee): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 12:59:32 crc kubenswrapper[4611]: E0929 12:59:32.263602 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-nmkf9" podUID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.407469 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5db5f45875-vs8fl"] Sep 29 12:59:32 crc kubenswrapper[4611]: E0929 12:59:32.410028 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81475e0c-543c-43f2-8a53-d9a1e7adcbad" containerName="neutron-db-sync" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.410060 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="81475e0c-543c-43f2-8a53-d9a1e7adcbad" containerName="neutron-db-sync" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.410225 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="81475e0c-543c-43f2-8a53-d9a1e7adcbad" containerName="neutron-db-sync" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 
12:59:32.412852 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.447121 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5db5f45875-vs8fl"] Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.535604 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-668bd746bd-wbxzt"] Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.538912 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.547751 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-668bd746bd-wbxzt"] Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.561291 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.561365 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-l2rkl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.561508 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.561679 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.572256 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-svc\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.572295 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-sb\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.572323 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ss57\" (UniqueName: \"kubernetes.io/projected/012ad4e9-ec08-45d6-80c8-db61653af044-kube-api-access-5ss57\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.572360 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-config\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.572413 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-swift-storage-0\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 
12:59:32.572446 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-nb\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674443 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-nb\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674529 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-svc\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674554 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-combined-ca-bundle\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674589 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-sb\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674641 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ss57\" (UniqueName: \"kubernetes.io/projected/012ad4e9-ec08-45d6-80c8-db61653af044-kube-api-access-5ss57\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674693 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-config\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674716 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-ovndb-tls-certs\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.674748 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bjjm\" (UniqueName: \"kubernetes.io/projected/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-kube-api-access-6bjjm\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 
12:59:32.675026 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-swift-storage-0\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.675052 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-config\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.675069 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-httpd-config\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.676344 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-config\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.679196 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-svc\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.679757 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-sb\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.680117 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-nb\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.680689 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-swift-storage-0\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.720704 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ss57\" (UniqueName: \"kubernetes.io/projected/012ad4e9-ec08-45d6-80c8-db61653af044-kube-api-access-5ss57\") pod \"dnsmasq-dns-5db5f45875-vs8fl\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.776098 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-ovndb-tls-certs\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.776143 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bjjm\" (UniqueName: \"kubernetes.io/projected/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-kube-api-access-6bjjm\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.776207 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-config\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.776228 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-httpd-config\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.776298 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-combined-ca-bundle\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.793496 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-httpd-config\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.794961 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-ovndb-tls-certs\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.795451 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-config\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.798294 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-combined-ca-bundle\") pod \"neutron-668bd746bd-wbxzt\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.806083 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bjjm\" (UniqueName: \"kubernetes.io/projected/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-kube-api-access-6bjjm\") pod \"neutron-668bd746bd-wbxzt\" (UID: 
\"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.880255 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c9489c674-t4fp8"] Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.880456 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.886225 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerStarted","Data":"7ffd1240f4a44fa6f4c042cf9efdde3e3be729b73ab479c4ce989e68b21f341a"} Sep 29 12:59:32 crc kubenswrapper[4611]: E0929 12:59:32.910974 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current-tested\\\"\"" pod="openstack/cinder-db-sync-nmkf9" podUID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" Sep 29 12:59:32 crc kubenswrapper[4611]: I0929 12:59:32.914491 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.114575 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f7ffcb4c4-cz4zj"] Sep 29 12:59:33 crc kubenswrapper[4611]: W0929 12:59:33.179966 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66aeb2b0_254f_4c1c_b565_438e9f754366.slice/crio-a966c0a2ba0101bad71be7df8afba6226c8b21f4e8f586d1f1905d26e41f3ac2 WatchSource:0}: Error finding container a966c0a2ba0101bad71be7df8afba6226c8b21f4e8f586d1f1905d26e41f3ac2: Status 404 returned error can't find the container with id a966c0a2ba0101bad71be7df8afba6226c8b21f4e8f586d1f1905d26e41f3ac2 Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.228708 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w8cp9"] Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.388773 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.562803 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5db5f45875-vs8fl"] Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.570804 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 12:59:33 crc kubenswrapper[4611]: W0929 12:59:33.579935 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9339db1_8dcb_4435_94f5_ac29a7ae99a0.slice/crio-a1f860fa94da5ad692353d8f5c0db4a27c58e1e8bcfac88c96a78fc3041acb7c WatchSource:0}: Error finding container a1f860fa94da5ad692353d8f5c0db4a27c58e1e8bcfac88c96a78fc3041acb7c: Status 404 returned error can't find the container with id a1f860fa94da5ad692353d8f5c0db4a27c58e1e8bcfac88c96a78fc3041acb7c Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.791529 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cb973fe-c427-4569-896f-b348e869c251" path="/var/lib/kubelet/pods/2cb973fe-c427-4569-896f-b348e869c251/volumes" Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.793469 4611 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4" path="/var/lib/kubelet/pods/5d680a0e-2058-48d2-8ce8-5e9ae60ef8a4/volumes" Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.796961 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7d299ff-579b-4d68-aea6-675e740c5f69" path="/var/lib/kubelet/pods/a7d299ff-579b-4d68-aea6-675e740c5f69/volumes" Sep 29 12:59:33 crc kubenswrapper[4611]: I0929 12:59:33.859488 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-668bd746bd-wbxzt"] Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:33.998832 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"51a74707-06e6-48d9-8636-a921a4a559e6","Type":"ContainerStarted","Data":"5ad54c319e9724641d037396bcd99698330f30231353e15ad276411218349c6c"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.032185 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerStarted","Data":"245cf191c2b0db624f95eaecf3ce3924c26650437e96f0b9ceaa53f75873dcd1"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.042583 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w8cp9" event={"ID":"d1ff376d-1d04-41b1-a417-6de011ef3054","Type":"ContainerStarted","Data":"81ce465819ffef39bf0463e75ccb04256e692fd254c40f8f1acefc1308420fff"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.042676 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w8cp9" event={"ID":"d1ff376d-1d04-41b1-a417-6de011ef3054","Type":"ContainerStarted","Data":"3c7cabf3b14f03e8ce19966121f52620238edf908281ef53af9a9fd82ec97c8b"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.077203 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerStarted","Data":"0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.077656 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerStarted","Data":"a966c0a2ba0101bad71be7df8afba6226c8b21f4e8f586d1f1905d26e41f3ac2"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.104512 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a9339db1-8dcb-4435-94f5-ac29a7ae99a0","Type":"ContainerStarted","Data":"a1f860fa94da5ad692353d8f5c0db4a27c58e1e8bcfac88c96a78fc3041acb7c"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.109550 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-668bd746bd-wbxzt" event={"ID":"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7","Type":"ContainerStarted","Data":"75d844b2fe35c88917989b8635f0942bfa9072f3c15df8485970f953bf0ddda7"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.131288 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-w8cp9" podStartSLOduration=14.131262464 podStartE2EDuration="14.131262464s" podCreationTimestamp="2025-09-29 12:59:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:34.083129676 +0000 UTC 
m=+1160.974649282" watchObservedRunningTime="2025-09-29 12:59:34.131262464 +0000 UTC m=+1161.022782070" Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.131411 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerStarted","Data":"02e3749cae440178d08377420327e1a0040c2355a5bcef666f74ba88526fabe1"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.135490 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8jjnd" event={"ID":"f5e45783-38de-4e39-9ad8-3da9ec111aa9","Type":"ContainerStarted","Data":"995a252886b9af34197d378a57203af56537bbb7c62567c8b57155cb153eaa65"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.142463 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" event={"ID":"012ad4e9-ec08-45d6-80c8-db61653af044","Type":"ContainerStarted","Data":"d1e8f6cca58c6d88bdc9f9d1a607f7bf5cfa1ca0f9d32db0bc9d14632eabab82"} Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.175995 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-8jjnd" podStartSLOduration=4.412908201 podStartE2EDuration="40.175976943s" podCreationTimestamp="2025-09-29 12:58:54 +0000 UTC" firstStartedPulling="2025-09-29 12:58:56.74582581 +0000 UTC m=+1123.637345416" lastFinishedPulling="2025-09-29 12:59:32.508894552 +0000 UTC m=+1159.400414158" observedRunningTime="2025-09-29 12:59:34.168949881 +0000 UTC m=+1161.060469487" watchObservedRunningTime="2025-09-29 12:59:34.175976943 +0000 UTC m=+1161.067496549" Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.629004 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 12:59:34 crc kubenswrapper[4611]: I0929 12:59:34.629356 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.159665 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerStarted","Data":"d741cd0ee1243c3bc0d72e207169fa52be409271801e3754c56404673f372a6e"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.187036 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"51a74707-06e6-48d9-8636-a921a4a559e6","Type":"ContainerStarted","Data":"cdd5f8b20832ce876cb2582356fe5b5b757d58bd4fb563981d1946788e6c232f"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.204909 4611 generic.go:334] "Generic (PLEG): container finished" podID="012ad4e9-ec08-45d6-80c8-db61653af044" containerID="93a4d0c596c05a59b0dc6ed24ddb36557d0e89be817684de3d920da49315aa7a" exitCode=0 Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.204995 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" 
event={"ID":"012ad4e9-ec08-45d6-80c8-db61653af044","Type":"ContainerStarted","Data":"99ce548ec2224936914a5fd77a487bfb296306c9755e9439626519d4eed51f39"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.205018 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" event={"ID":"012ad4e9-ec08-45d6-80c8-db61653af044","Type":"ContainerDied","Data":"93a4d0c596c05a59b0dc6ed24ddb36557d0e89be817684de3d920da49315aa7a"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.205914 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.207143 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a9339db1-8dcb-4435-94f5-ac29a7ae99a0","Type":"ContainerStarted","Data":"8ff2a9cb06cc53485bc66968bad04c7630d353190ab6e69e2516420fd3f9e5d1"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.209673 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-668bd746bd-wbxzt" event={"ID":"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7","Type":"ContainerStarted","Data":"7d5064836c84d7385443320df67675a8be13b21da8d1dad74c305ec1ea9338c9"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.223419 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerStarted","Data":"d6fee3b5d5de62a3b7b8286065e555d624605989eba216898d5cc83f4d5aa788"} Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.242841 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" podStartSLOduration=3.242819957 podStartE2EDuration="3.242819957s" podCreationTimestamp="2025-09-29 12:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:35.239667536 +0000 UTC m=+1162.131187152" watchObservedRunningTime="2025-09-29 12:59:35.242819957 +0000 UTC m=+1162.134339563" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.247986 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podStartSLOduration=27.247967525 podStartE2EDuration="27.247967525s" podCreationTimestamp="2025-09-29 12:59:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:35.201121424 +0000 UTC m=+1162.092641040" watchObservedRunningTime="2025-09-29 12:59:35.247967525 +0000 UTC m=+1162.139487131" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.284404 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5c9489c674-t4fp8" podStartSLOduration=27.005643429 podStartE2EDuration="27.284363835s" podCreationTimestamp="2025-09-29 12:59:08 +0000 UTC" firstStartedPulling="2025-09-29 12:59:32.876034469 +0000 UTC m=+1159.767554075" lastFinishedPulling="2025-09-29 12:59:33.154754875 +0000 UTC m=+1160.046274481" observedRunningTime="2025-09-29 12:59:35.277743454 +0000 UTC m=+1162.169263070" watchObservedRunningTime="2025-09-29 12:59:35.284363835 +0000 UTC m=+1162.175883441" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.294124 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-697f494f47-d2wfq"] Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 
12:59:35.295644 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.297659 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.316504 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.322668 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-697f494f47-d2wfq"] Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413315 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-combined-ca-bundle\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413378 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-httpd-config\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413431 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-ovndb-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413448 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-public-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413473 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-config\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413496 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-internal-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.413570 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8qwr\" (UniqueName: \"kubernetes.io/projected/d27ae389-a34b-48a0-b349-8ff7e3268e40-kube-api-access-z8qwr\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514676 4611 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-ovndb-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514725 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-public-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514754 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-config\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514805 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-internal-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514910 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8qwr\" (UniqueName: \"kubernetes.io/projected/d27ae389-a34b-48a0-b349-8ff7e3268e40-kube-api-access-z8qwr\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514951 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-combined-ca-bundle\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.514994 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-httpd-config\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.519203 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-public-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.519976 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-httpd-config\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.520507 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-internal-tls-certs\") pod 
\"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.521429 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-ovndb-tls-certs\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.522112 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-config\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.530140 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27ae389-a34b-48a0-b349-8ff7e3268e40-combined-ca-bundle\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.535936 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8qwr\" (UniqueName: \"kubernetes.io/projected/d27ae389-a34b-48a0-b349-8ff7e3268e40-kube-api-access-z8qwr\") pod \"neutron-697f494f47-d2wfq\" (UID: \"d27ae389-a34b-48a0-b349-8ff7e3268e40\") " pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:35 crc kubenswrapper[4611]: I0929 12:59:35.616732 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:36 crc kubenswrapper[4611]: I0929 12:59:36.166978 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-697f494f47-d2wfq"] Sep 29 12:59:36 crc kubenswrapper[4611]: I0929 12:59:36.240432 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-697f494f47-d2wfq" event={"ID":"d27ae389-a34b-48a0-b349-8ff7e3268e40","Type":"ContainerStarted","Data":"473c670ff6b5ab53206f18a6f111ca9408b12f80a6ba4dcc06641460b61fb6fb"} Sep 29 12:59:37 crc kubenswrapper[4611]: I0929 12:59:37.254090 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-668bd746bd-wbxzt" event={"ID":"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7","Type":"ContainerStarted","Data":"614014d53528961e58f380bbac47797f4939d92d8e5ecd17843ea81902156172"} Sep 29 12:59:37 crc kubenswrapper[4611]: I0929 12:59:37.254484 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 12:59:37 crc kubenswrapper[4611]: I0929 12:59:37.263956 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-697f494f47-d2wfq" event={"ID":"d27ae389-a34b-48a0-b349-8ff7e3268e40","Type":"ContainerStarted","Data":"4fac4d29492be970c94dea18ebdf9a80776a6a74b14149e1dbed1fb046824a43"} Sep 29 12:59:37 crc kubenswrapper[4611]: I0929 12:59:37.270476 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"51a74707-06e6-48d9-8636-a921a4a559e6","Type":"ContainerStarted","Data":"5d71ea815ea44b121494615ef8733654a74798a7d69d693916dd0367cc2776c1"} Sep 29 12:59:37 crc kubenswrapper[4611]: I0929 12:59:37.294430 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-668bd746bd-wbxzt" 
podStartSLOduration=5.294411515 podStartE2EDuration="5.294411515s" podCreationTimestamp="2025-09-29 12:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:37.282948155 +0000 UTC m=+1164.174467761" watchObservedRunningTime="2025-09-29 12:59:37.294411515 +0000 UTC m=+1164.185931121" Sep 29 12:59:37 crc kubenswrapper[4611]: I0929 12:59:37.336298 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=17.336275433 podStartE2EDuration="17.336275433s" podCreationTimestamp="2025-09-29 12:59:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:37.309256704 +0000 UTC m=+1164.200776320" watchObservedRunningTime="2025-09-29 12:59:37.336275433 +0000 UTC m=+1164.227795039" Sep 29 12:59:38 crc kubenswrapper[4611]: I0929 12:59:38.283989 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-697f494f47-d2wfq" event={"ID":"d27ae389-a34b-48a0-b349-8ff7e3268e40","Type":"ContainerStarted","Data":"e03afbba6a2693b975cd9f8f70a558b9c6c55b0ba7cdb288146f31171f829430"} Sep 29 12:59:38 crc kubenswrapper[4611]: I0929 12:59:38.466408 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:38 crc kubenswrapper[4611]: I0929 12:59:38.467572 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 12:59:38 crc kubenswrapper[4611]: I0929 12:59:38.561597 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:38 crc kubenswrapper[4611]: I0929 12:59:38.561657 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 12:59:39 crc kubenswrapper[4611]: I0929 12:59:39.294745 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9sjsz" event={"ID":"adc1b4a7-82e6-42fa-9117-96db1ddcda7c","Type":"ContainerStarted","Data":"ca0943188ff5b89702704b6a1ca55b982bd8a550ac85bb3071accd3b8539007e"} Sep 29 12:59:39 crc kubenswrapper[4611]: I0929 12:59:39.297048 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a9339db1-8dcb-4435-94f5-ac29a7ae99a0","Type":"ContainerStarted","Data":"088f88043c2c3d9ba57ea71f288395d013f2f40f3b12e33af2a09aed6da4c69a"} Sep 29 12:59:39 crc kubenswrapper[4611]: I0929 12:59:39.299268 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerStarted","Data":"50c6d6fbcacbc473633626cefd5f06aca330aed877d600da176b9c172dd4b32a"} Sep 29 12:59:39 crc kubenswrapper[4611]: I0929 12:59:39.299652 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-697f494f47-d2wfq" Sep 29 12:59:39 crc kubenswrapper[4611]: I0929 12:59:39.317208 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-9sjsz" podStartSLOduration=4.142634298 podStartE2EDuration="45.317191504s" podCreationTimestamp="2025-09-29 12:58:54 +0000 UTC" firstStartedPulling="2025-09-29 12:58:56.824039715 +0000 UTC m=+1123.715559321" lastFinishedPulling="2025-09-29 12:59:37.998596921 +0000 UTC m=+1164.890116527" 
observedRunningTime="2025-09-29 12:59:39.313349304 +0000 UTC m=+1166.204868910" watchObservedRunningTime="2025-09-29 12:59:39.317191504 +0000 UTC m=+1166.208711110" Sep 29 12:59:39 crc kubenswrapper[4611]: I0929 12:59:39.335441 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-697f494f47-d2wfq" podStartSLOduration=4.33542288 podStartE2EDuration="4.33542288s" podCreationTimestamp="2025-09-29 12:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:39.333930417 +0000 UTC m=+1166.225450033" watchObservedRunningTime="2025-09-29 12:59:39.33542288 +0000 UTC m=+1166.226942486" Sep 29 12:59:40 crc kubenswrapper[4611]: I0929 12:59:40.346259 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=20.346238917 podStartE2EDuration="20.346238917s" podCreationTimestamp="2025-09-29 12:59:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:40.342240502 +0000 UTC m=+1167.233760108" watchObservedRunningTime="2025-09-29 12:59:40.346238917 +0000 UTC m=+1167.237758523" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.508253 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.508645 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.536539 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.536638 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.558854 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.564940 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.569458 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:41 crc kubenswrapper[4611]: I0929 12:59:41.632236 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.335566 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.335596 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.335607 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.335615 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.882860 4611 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.960320 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b878cb6dc-jtk8m"] Sep 29 12:59:42 crc kubenswrapper[4611]: I0929 12:59:42.960778 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="dnsmasq-dns" containerID="cri-o://3b225203218ca9e7e7971a354a5cdc7764227140693510ffdbed658d847b0f42" gracePeriod=10 Sep 29 12:59:43 crc kubenswrapper[4611]: I0929 12:59:43.356479 4611 generic.go:334] "Generic (PLEG): container finished" podID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerID="3b225203218ca9e7e7971a354a5cdc7764227140693510ffdbed658d847b0f42" exitCode=0 Sep 29 12:59:43 crc kubenswrapper[4611]: I0929 12:59:43.356573 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" event={"ID":"28aa84b8-5636-4f83-bc39-c62f20505d77","Type":"ContainerDied","Data":"3b225203218ca9e7e7971a354a5cdc7764227140693510ffdbed658d847b0f42"} Sep 29 12:59:44 crc kubenswrapper[4611]: I0929 12:59:44.370656 4611 generic.go:334] "Generic (PLEG): container finished" podID="d1ff376d-1d04-41b1-a417-6de011ef3054" containerID="81ce465819ffef39bf0463e75ccb04256e692fd254c40f8f1acefc1308420fff" exitCode=0 Sep 29 12:59:44 crc kubenswrapper[4611]: I0929 12:59:44.370772 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w8cp9" event={"ID":"d1ff376d-1d04-41b1-a417-6de011ef3054","Type":"ContainerDied","Data":"81ce465819ffef39bf0463e75ccb04256e692fd254c40f8f1acefc1308420fff"} Sep 29 12:59:46 crc kubenswrapper[4611]: I0929 12:59:46.391474 4611 generic.go:334] "Generic (PLEG): container finished" podID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" containerID="995a252886b9af34197d378a57203af56537bbb7c62567c8b57155cb153eaa65" exitCode=0 Sep 29 12:59:46 crc kubenswrapper[4611]: I0929 12:59:46.391552 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8jjnd" event={"ID":"f5e45783-38de-4e39-9ad8-3da9ec111aa9","Type":"ContainerDied","Data":"995a252886b9af34197d378a57203af56537bbb7c62567c8b57155cb153eaa65"} Sep 29 12:59:46 crc kubenswrapper[4611]: I0929 12:59:46.598520 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.148:5353: connect: connection refused" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.468691 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.521661 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.530226 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-8jjnd" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.565607 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.616545 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617264 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617293 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkgjs\" (UniqueName: \"kubernetes.io/projected/f5e45783-38de-4e39-9ad8-3da9ec111aa9-kube-api-access-kkgjs\") pod \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617398 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5e45783-38de-4e39-9ad8-3da9ec111aa9-logs\") pod \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617428 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-scripts\") pod \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617445 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-scripts\") pod \"d1ff376d-1d04-41b1-a417-6de011ef3054\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617466 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-credential-keys\") pod \"d1ff376d-1d04-41b1-a417-6de011ef3054\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617519 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-combined-ca-bundle\") pod \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617541 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-config-data\") pod \"d1ff376d-1d04-41b1-a417-6de011ef3054\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617614 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-combined-ca-bundle\") pod \"d1ff376d-1d04-41b1-a417-6de011ef3054\" (UID: 
\"d1ff376d-1d04-41b1-a417-6de011ef3054\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617656 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-fernet-keys\") pod \"d1ff376d-1d04-41b1-a417-6de011ef3054\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617749 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-config-data\") pod \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\" (UID: \"f5e45783-38de-4e39-9ad8-3da9ec111aa9\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.617771 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvqc5\" (UniqueName: \"kubernetes.io/projected/d1ff376d-1d04-41b1-a417-6de011ef3054-kube-api-access-bvqc5\") pod \"d1ff376d-1d04-41b1-a417-6de011ef3054\" (UID: \"d1ff376d-1d04-41b1-a417-6de011ef3054\") " Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.618563 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.618609 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e45783-38de-4e39-9ad8-3da9ec111aa9-logs" (OuterVolumeSpecName: "logs") pod "f5e45783-38de-4e39-9ad8-3da9ec111aa9" (UID: "f5e45783-38de-4e39-9ad8-3da9ec111aa9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.629688 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1ff376d-1d04-41b1-a417-6de011ef3054-kube-api-access-bvqc5" (OuterVolumeSpecName: "kube-api-access-bvqc5") pod "d1ff376d-1d04-41b1-a417-6de011ef3054" (UID: "d1ff376d-1d04-41b1-a417-6de011ef3054"). InnerVolumeSpecName "kube-api-access-bvqc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.631163 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-scripts" (OuterVolumeSpecName: "scripts") pod "f5e45783-38de-4e39-9ad8-3da9ec111aa9" (UID: "f5e45783-38de-4e39-9ad8-3da9ec111aa9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.631203 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-scripts" (OuterVolumeSpecName: "scripts") pod "d1ff376d-1d04-41b1-a417-6de011ef3054" (UID: "d1ff376d-1d04-41b1-a417-6de011ef3054"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.631754 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e45783-38de-4e39-9ad8-3da9ec111aa9-kube-api-access-kkgjs" (OuterVolumeSpecName: "kube-api-access-kkgjs") pod "f5e45783-38de-4e39-9ad8-3da9ec111aa9" (UID: "f5e45783-38de-4e39-9ad8-3da9ec111aa9"). InnerVolumeSpecName "kube-api-access-kkgjs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.632644 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d1ff376d-1d04-41b1-a417-6de011ef3054" (UID: "d1ff376d-1d04-41b1-a417-6de011ef3054"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.643846 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d1ff376d-1d04-41b1-a417-6de011ef3054" (UID: "d1ff376d-1d04-41b1-a417-6de011ef3054"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.655983 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.670783 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.681793 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-config-data" (OuterVolumeSpecName: "config-data") pod "f5e45783-38de-4e39-9ad8-3da9ec111aa9" (UID: "f5e45783-38de-4e39-9ad8-3da9ec111aa9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.683779 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1ff376d-1d04-41b1-a417-6de011ef3054" (UID: "d1ff376d-1d04-41b1-a417-6de011ef3054"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.685678 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-config-data" (OuterVolumeSpecName: "config-data") pod "d1ff376d-1d04-41b1-a417-6de011ef3054" (UID: "d1ff376d-1d04-41b1-a417-6de011ef3054"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.695687 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5e45783-38de-4e39-9ad8-3da9ec111aa9" (UID: "f5e45783-38de-4e39-9ad8-3da9ec111aa9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.724947 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5e45783-38de-4e39-9ad8-3da9ec111aa9-logs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.724978 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.724988 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.724997 4611 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725006 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725014 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725022 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725030 4611 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1ff376d-1d04-41b1-a417-6de011ef3054-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725038 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5e45783-38de-4e39-9ad8-3da9ec111aa9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725046 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvqc5\" (UniqueName: \"kubernetes.io/projected/d1ff376d-1d04-41b1-a417-6de011ef3054-kube-api-access-bvqc5\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:48 crc kubenswrapper[4611]: I0929 12:59:48.725054 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkgjs\" (UniqueName: \"kubernetes.io/projected/f5e45783-38de-4e39-9ad8-3da9ec111aa9-kube-api-access-kkgjs\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.030075 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.142565 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-config\") pod \"28aa84b8-5636-4f83-bc39-c62f20505d77\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.142943 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-svc\") pod \"28aa84b8-5636-4f83-bc39-c62f20505d77\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.143038 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-sb\") pod \"28aa84b8-5636-4f83-bc39-c62f20505d77\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.143078 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-nb\") pod \"28aa84b8-5636-4f83-bc39-c62f20505d77\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.143127 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-swift-storage-0\") pod \"28aa84b8-5636-4f83-bc39-c62f20505d77\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.143183 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcd78\" (UniqueName: \"kubernetes.io/projected/28aa84b8-5636-4f83-bc39-c62f20505d77-kube-api-access-fcd78\") pod \"28aa84b8-5636-4f83-bc39-c62f20505d77\" (UID: \"28aa84b8-5636-4f83-bc39-c62f20505d77\") " Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.150966 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28aa84b8-5636-4f83-bc39-c62f20505d77-kube-api-access-fcd78" (OuterVolumeSpecName: "kube-api-access-fcd78") pod "28aa84b8-5636-4f83-bc39-c62f20505d77" (UID: "28aa84b8-5636-4f83-bc39-c62f20505d77"). InnerVolumeSpecName "kube-api-access-fcd78". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.242820 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "28aa84b8-5636-4f83-bc39-c62f20505d77" (UID: "28aa84b8-5636-4f83-bc39-c62f20505d77"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.249782 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.249813 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcd78\" (UniqueName: \"kubernetes.io/projected/28aa84b8-5636-4f83-bc39-c62f20505d77-kube-api-access-fcd78\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.271344 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-config" (OuterVolumeSpecName: "config") pod "28aa84b8-5636-4f83-bc39-c62f20505d77" (UID: "28aa84b8-5636-4f83-bc39-c62f20505d77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.279056 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "28aa84b8-5636-4f83-bc39-c62f20505d77" (UID: "28aa84b8-5636-4f83-bc39-c62f20505d77"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.304088 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "28aa84b8-5636-4f83-bc39-c62f20505d77" (UID: "28aa84b8-5636-4f83-bc39-c62f20505d77"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.306195 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "28aa84b8-5636-4f83-bc39-c62f20505d77" (UID: "28aa84b8-5636-4f83-bc39-c62f20505d77"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.351942 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.351987 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-config\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.351999 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.352011 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28aa84b8-5636-4f83-bc39-c62f20505d77-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.456280 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerStarted","Data":"1e3bef10a9d8fb2819a441ef364c47bf65035e558208a0996d9977db26e60250"} Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.457724 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8jjnd" event={"ID":"f5e45783-38de-4e39-9ad8-3da9ec111aa9","Type":"ContainerDied","Data":"f85cbdf987571209b4c44fb0f560a66d3476ada3803b3ef43b476a274b0d9188"} Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.457755 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f85cbdf987571209b4c44fb0f560a66d3476ada3803b3ef43b476a274b0d9188" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.457817 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8jjnd" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.478350 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w8cp9" event={"ID":"d1ff376d-1d04-41b1-a417-6de011ef3054","Type":"ContainerDied","Data":"3c7cabf3b14f03e8ce19966121f52620238edf908281ef53af9a9fd82ec97c8b"} Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.478391 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c7cabf3b14f03e8ce19966121f52620238edf908281ef53af9a9fd82ec97c8b" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.478443 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w8cp9" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.508827 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" event={"ID":"28aa84b8-5636-4f83-bc39-c62f20505d77","Type":"ContainerDied","Data":"6f79fa1b52efacc7821e888f47ed9c0bc33fb8dab6c0e67a6dd60812b0896a92"} Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.508867 4611 scope.go:117] "RemoveContainer" containerID="3b225203218ca9e7e7971a354a5cdc7764227140693510ffdbed658d847b0f42" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.512059 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b878cb6dc-jtk8m" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.586102 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b878cb6dc-jtk8m"] Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.625702 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b878cb6dc-jtk8m"] Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.638557 4611 scope.go:117] "RemoveContainer" containerID="4e0fadbcc97158d47b8f11427708aecb4d7d031d411bb56cfb48ed3027bb5aeb" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.756107 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" path="/var/lib/kubelet/pods/28aa84b8-5636-4f83-bc39-c62f20505d77/volumes" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.756705 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-84bd74d746-h92xg"] Sep 29 12:59:49 crc kubenswrapper[4611]: E0929 12:59:49.757039 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" containerName="placement-db-sync" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757063 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" containerName="placement-db-sync" Sep 29 12:59:49 crc kubenswrapper[4611]: E0929 12:59:49.757099 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="init" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757109 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="init" Sep 29 12:59:49 crc kubenswrapper[4611]: E0929 12:59:49.757123 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1ff376d-1d04-41b1-a417-6de011ef3054" containerName="keystone-bootstrap" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757131 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1ff376d-1d04-41b1-a417-6de011ef3054" containerName="keystone-bootstrap" Sep 29 12:59:49 crc kubenswrapper[4611]: E0929 12:59:49.757156 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="dnsmasq-dns" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757162 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="dnsmasq-dns" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757316 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="28aa84b8-5636-4f83-bc39-c62f20505d77" containerName="dnsmasq-dns" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757335 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" containerName="placement-db-sync" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.757344 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1ff376d-1d04-41b1-a417-6de011ef3054" containerName="keystone-bootstrap" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.761715 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.769655 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.769899 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.769935 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.770006 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.770014 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cs8gt" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.770121 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.809188 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84bd74d746-h92xg"] Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.848971 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-f475d4f88-9gwgs"] Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.851151 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.858665 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.858703 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.858847 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-zns9h" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.859768 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.863831 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866294 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-internal-tls-certs\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866353 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-fernet-keys\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866387 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-combined-ca-bundle\") pod \"keystone-84bd74d746-h92xg\" (UID: 
\"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866417 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-scripts\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866442 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-config-data\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866465 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-public-tls-certs\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866488 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxpnn\" (UniqueName: \"kubernetes.io/projected/81599dc0-616b-43ad-91ef-c033d30f1892-kube-api-access-sxpnn\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.866508 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-credential-keys\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.871857 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f475d4f88-9gwgs"] Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.968764 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-combined-ca-bundle\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.968868 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-config-data\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.968911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqlrg\" (UniqueName: \"kubernetes.io/projected/6fd6bde4-17d7-4854-9238-4492968338d8-kube-api-access-vqlrg\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.968940 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-public-tls-certs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.968966 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-internal-tls-certs\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969003 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-fernet-keys\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969034 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-combined-ca-bundle\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969055 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-internal-tls-certs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969077 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-scripts\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969096 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-config-data\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969112 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-public-tls-certs\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969129 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxpnn\" (UniqueName: \"kubernetes.io/projected/81599dc0-616b-43ad-91ef-c033d30f1892-kube-api-access-sxpnn\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969145 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" 
(UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-credential-keys\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969176 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-scripts\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.969201 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd6bde4-17d7-4854-9238-4492968338d8-logs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.978492 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-combined-ca-bundle\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.978926 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-internal-tls-certs\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.979351 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-config-data\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.989608 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-scripts\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.991191 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-credential-keys\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.993767 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-fernet-keys\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.995681 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81599dc0-616b-43ad-91ef-c033d30f1892-public-tls-certs\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " 
pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:49 crc kubenswrapper[4611]: I0929 12:59:49.996210 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxpnn\" (UniqueName: \"kubernetes.io/projected/81599dc0-616b-43ad-91ef-c033d30f1892-kube-api-access-sxpnn\") pod \"keystone-84bd74d746-h92xg\" (UID: \"81599dc0-616b-43ad-91ef-c033d30f1892\") " pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.070867 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqlrg\" (UniqueName: \"kubernetes.io/projected/6fd6bde4-17d7-4854-9238-4492968338d8-kube-api-access-vqlrg\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.070918 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-public-tls-certs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.072182 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-internal-tls-certs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.072638 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-scripts\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.072664 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd6bde4-17d7-4854-9238-4492968338d8-logs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.072685 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-combined-ca-bundle\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.072733 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-config-data\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.073364 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd6bde4-17d7-4854-9238-4492968338d8-logs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.074993 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-public-tls-certs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.080948 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-internal-tls-certs\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.082289 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-scripts\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.082744 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-combined-ca-bundle\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.083233 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd6bde4-17d7-4854-9238-4492968338d8-config-data\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.103246 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqlrg\" (UniqueName: \"kubernetes.io/projected/6fd6bde4-17d7-4854-9238-4492968338d8-kube-api-access-vqlrg\") pod \"placement-f475d4f88-9gwgs\" (UID: \"6fd6bde4-17d7-4854-9238-4492968338d8\") " pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.106529 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84bd74d746-h92xg" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.176943 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.547730 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nmkf9" event={"ID":"578c0d20-ef6e-43d0-8290-1ec794d9a0ee","Type":"ContainerStarted","Data":"de5ed9e3669155a1741ee806c28b219f73255f7bca033bc129a01ef986b8f519"} Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.567355 4611 generic.go:334] "Generic (PLEG): container finished" podID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" containerID="ca0943188ff5b89702704b6a1ca55b982bd8a550ac85bb3071accd3b8539007e" exitCode=0 Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.567736 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9sjsz" event={"ID":"adc1b4a7-82e6-42fa-9117-96db1ddcda7c","Type":"ContainerDied","Data":"ca0943188ff5b89702704b6a1ca55b982bd8a550ac85bb3071accd3b8539007e"} Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.577730 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-nmkf9" podStartSLOduration=4.220955717 podStartE2EDuration="56.57771452s" podCreationTimestamp="2025-09-29 12:58:54 +0000 UTC" firstStartedPulling="2025-09-29 12:58:56.362414804 +0000 UTC m=+1123.253934410" lastFinishedPulling="2025-09-29 12:59:48.719173607 +0000 UTC m=+1175.610693213" observedRunningTime="2025-09-29 12:59:50.576861535 +0000 UTC m=+1177.468381141" watchObservedRunningTime="2025-09-29 12:59:50.57771452 +0000 UTC m=+1177.469234126" Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.668164 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84bd74d746-h92xg"] Sep 29 12:59:50 crc kubenswrapper[4611]: I0929 12:59:50.820936 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f475d4f88-9gwgs"] Sep 29 12:59:50 crc kubenswrapper[4611]: W0929 12:59:50.828289 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fd6bde4_17d7_4854_9238_4492968338d8.slice/crio-999a544dcf6e9ea3a4a92dc0ef1eacf17969f2ac5627dbff61f3efc5f6c35496 WatchSource:0}: Error finding container 999a544dcf6e9ea3a4a92dc0ef1eacf17969f2ac5627dbff61f3efc5f6c35496: Status 404 returned error can't find the container with id 999a544dcf6e9ea3a4a92dc0ef1eacf17969f2ac5627dbff61f3efc5f6c35496 Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.593579 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f475d4f88-9gwgs" event={"ID":"6fd6bde4-17d7-4854-9238-4492968338d8","Type":"ContainerStarted","Data":"005bc1b29d792e996af611a80675027f492981621a6062418c5e4cefc1ccd8fe"} Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.594156 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f475d4f88-9gwgs" event={"ID":"6fd6bde4-17d7-4854-9238-4492968338d8","Type":"ContainerStarted","Data":"ed99ad2af790169806eb3f64c68e1720a4683aaf2aca8afdb74ed50fb5f6d69c"} Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.594170 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f475d4f88-9gwgs" event={"ID":"6fd6bde4-17d7-4854-9238-4492968338d8","Type":"ContainerStarted","Data":"999a544dcf6e9ea3a4a92dc0ef1eacf17969f2ac5627dbff61f3efc5f6c35496"} Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.595256 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:51 crc 
kubenswrapper[4611]: I0929 12:59:51.595284 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-f475d4f88-9gwgs" Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.601082 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84bd74d746-h92xg" event={"ID":"81599dc0-616b-43ad-91ef-c033d30f1892","Type":"ContainerStarted","Data":"7d42e8157214fd3e4722e8730179b237596bc7cf5468d7062436aec4ac1db87d"} Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.601124 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84bd74d746-h92xg" event={"ID":"81599dc0-616b-43ad-91ef-c033d30f1892","Type":"ContainerStarted","Data":"957d77c1cf015b4c7a0e4af0bc513e644f2734cccdb0ae7f390f5cbd69f8b808"} Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.657230 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-f475d4f88-9gwgs" podStartSLOduration=2.6572157770000002 podStartE2EDuration="2.657215777s" podCreationTimestamp="2025-09-29 12:59:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:51.629751775 +0000 UTC m=+1178.521271381" watchObservedRunningTime="2025-09-29 12:59:51.657215777 +0000 UTC m=+1178.548735383" Sep 29 12:59:51 crc kubenswrapper[4611]: I0929 12:59:51.666606 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-84bd74d746-h92xg" podStartSLOduration=2.666585887 podStartE2EDuration="2.666585887s" podCreationTimestamp="2025-09-29 12:59:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:51.657143645 +0000 UTC m=+1178.548663251" watchObservedRunningTime="2025-09-29 12:59:51.666585887 +0000 UTC m=+1178.558105493" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.120086 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9sjsz" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.232266 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-combined-ca-bundle\") pod \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.232431 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snlx8\" (UniqueName: \"kubernetes.io/projected/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-kube-api-access-snlx8\") pod \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.232504 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-db-sync-config-data\") pod \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\" (UID: \"adc1b4a7-82e6-42fa-9117-96db1ddcda7c\") " Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.262907 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "adc1b4a7-82e6-42fa-9117-96db1ddcda7c" (UID: "adc1b4a7-82e6-42fa-9117-96db1ddcda7c"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.262938 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-kube-api-access-snlx8" (OuterVolumeSpecName: "kube-api-access-snlx8") pod "adc1b4a7-82e6-42fa-9117-96db1ddcda7c" (UID: "adc1b4a7-82e6-42fa-9117-96db1ddcda7c"). InnerVolumeSpecName "kube-api-access-snlx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.284533 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "adc1b4a7-82e6-42fa-9117-96db1ddcda7c" (UID: "adc1b4a7-82e6-42fa-9117-96db1ddcda7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.337305 4611 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.337335 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.337345 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snlx8\" (UniqueName: \"kubernetes.io/projected/adc1b4a7-82e6-42fa-9117-96db1ddcda7c-kube-api-access-snlx8\") on node \"crc\" DevicePath \"\"" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.626270 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9sjsz" event={"ID":"adc1b4a7-82e6-42fa-9117-96db1ddcda7c","Type":"ContainerDied","Data":"a26e72fe575737452162aa497c6288f64efe6b3c13eaf79bf2f44f58e9b6bc96"} Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.626323 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a26e72fe575737452162aa497c6288f64efe6b3c13eaf79bf2f44f58e9b6bc96" Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.626392 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9sjsz"
Sep 29 12:59:52 crc kubenswrapper[4611]: I0929 12:59:52.627058 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-84bd74d746-h92xg"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.043001 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7447bc4958-7qrwm"]
Sep 29 12:59:53 crc kubenswrapper[4611]: E0929 12:59:53.043820 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" containerName="barbican-db-sync"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.043841 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" containerName="barbican-db-sync"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.044109 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" containerName="barbican-db-sync"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.045058 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.052804 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.053083 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.053301 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-9n8gl"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.054773 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-fbf8688df-k8w9k"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.061941 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.079925 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.080886 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7447bc4958-7qrwm"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.112549 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-fbf8688df-k8w9k"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.133969 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69bc85cf77-brbf5"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.135746 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.185790 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7675325b-f3c9-47e9-9992-bdc23d3a761f-logs\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.185866 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-config-data\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.185912 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-combined-ca-bundle\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.185968 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd065838-835d-4f4d-aa6f-27cde218b024-logs\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186003 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-config-data-custom\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186056 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvjst\" (UniqueName: \"kubernetes.io/projected/bd065838-835d-4f4d-aa6f-27cde218b024-kube-api-access-rvjst\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186102 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-config-data-custom\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186151 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ksqm\" (UniqueName: \"kubernetes.io/projected/7675325b-f3c9-47e9-9992-bdc23d3a761f-kube-api-access-5ksqm\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186179 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-combined-ca-bundle\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186207 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-config-data\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.186355 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69bc85cf77-brbf5"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.273222 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-666b746458-tsmhw"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.275451 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.281986 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-config-data\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287467 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-swift-storage-0\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287506 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7675325b-f3c9-47e9-9992-bdc23d3a761f-logs\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287553 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-svc\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287583 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-config-data\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287748 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-combined-ca-bundle\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287809 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-config\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287838 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd065838-835d-4f4d-aa6f-27cde218b024-logs\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287873 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-config-data-custom\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287907 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvjst\" (UniqueName: \"kubernetes.io/projected/bd065838-835d-4f4d-aa6f-27cde218b024-kube-api-access-rvjst\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287949 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-config-data-custom\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.287974 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-sb\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.288002 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-nb\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.288022 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c5k6\" (UniqueName: \"kubernetes.io/projected/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-kube-api-access-2c5k6\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.288059 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ksqm\" (UniqueName: \"kubernetes.io/projected/7675325b-f3c9-47e9-9992-bdc23d3a761f-kube-api-access-5ksqm\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.288085 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-combined-ca-bundle\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.288087 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7675325b-f3c9-47e9-9992-bdc23d3a761f-logs\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.288386 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd065838-835d-4f4d-aa6f-27cde218b024-logs\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.295424 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-config-data-custom\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.296338 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-combined-ca-bundle\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.312926 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-config-data-custom\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.319699 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-666b746458-tsmhw"]
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.321561 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-config-data\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.344681 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ksqm\" (UniqueName: \"kubernetes.io/projected/7675325b-f3c9-47e9-9992-bdc23d3a761f-kube-api-access-5ksqm\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.348447 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvjst\" (UniqueName: \"kubernetes.io/projected/bd065838-835d-4f4d-aa6f-27cde218b024-kube-api-access-rvjst\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.351704 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd065838-835d-4f4d-aa6f-27cde218b024-config-data\") pod \"barbican-worker-fbf8688df-k8w9k\" (UID: \"bd065838-835d-4f4d-aa6f-27cde218b024\") " pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.360158 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7675325b-f3c9-47e9-9992-bdc23d3a761f-combined-ca-bundle\") pod \"barbican-keystone-listener-7447bc4958-7qrwm\" (UID: \"7675325b-f3c9-47e9-9992-bdc23d3a761f\") " pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395700 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-sb\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395763 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-combined-ca-bundle\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395787 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-nb\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395806 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c5k6\" (UniqueName: \"kubernetes.io/projected/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-kube-api-access-2c5k6\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395845 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data-custom\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395926 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-swift-storage-0\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395956 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67hkn\" (UniqueName: \"kubernetes.io/projected/4831cfcd-d5b0-427b-8977-a6120aa09966-kube-api-access-67hkn\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.395983 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-svc\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.396026 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4831cfcd-d5b0-427b-8977-a6120aa09966-logs\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.396046 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-config\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.396069 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.396873 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-sb\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.396939 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-nb\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.397485 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-svc\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.397881 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-swift-storage-0\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.398052 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-config\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.419493 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c5k6\" (UniqueName: \"kubernetes.io/projected/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-kube-api-access-2c5k6\") pod \"dnsmasq-dns-69bc85cf77-brbf5\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.430808 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.444301 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-fbf8688df-k8w9k"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.467070 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.498030 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4831cfcd-d5b0-427b-8977-a6120aa09966-logs\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.498101 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.501912 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-combined-ca-bundle\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.501983 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data-custom\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.502120 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67hkn\" (UniqueName: \"kubernetes.io/projected/4831cfcd-d5b0-427b-8977-a6120aa09966-kube-api-access-67hkn\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.502222 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4831cfcd-d5b0-427b-8977-a6120aa09966-logs\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.516337 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data-custom\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.516883 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.517973 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-combined-ca-bundle\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.527198 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67hkn\" (UniqueName: \"kubernetes.io/projected/4831cfcd-d5b0-427b-8977-a6120aa09966-kube-api-access-67hkn\") pod \"barbican-api-666b746458-tsmhw\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:53 crc kubenswrapper[4611]: I0929 12:59:53.787731 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.192482 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69bc85cf77-brbf5"]
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.366554 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7447bc4958-7qrwm"]
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.454218 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-fbf8688df-k8w9k"]
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.608871 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-666b746458-tsmhw"]
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.678286 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-666b746458-tsmhw" event={"ID":"4831cfcd-d5b0-427b-8977-a6120aa09966","Type":"ContainerStarted","Data":"db75040bc352a3bd50b22f8146bce30d5b34a72a6aceb4863ecb9c5934277fa0"}
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.714991 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fbf8688df-k8w9k" event={"ID":"bd065838-835d-4f4d-aa6f-27cde218b024","Type":"ContainerStarted","Data":"0a016cfce8302024cfc4dea6449507db832259e941bfdc8ed27f0c4111fede58"}
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.716295 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm" event={"ID":"7675325b-f3c9-47e9-9992-bdc23d3a761f","Type":"ContainerStarted","Data":"26491e69f280f32e4857caaef8e4db6071f6b0b06fd2ee2a7b03daffa3eaa966"}
Sep 29 12:59:54 crc kubenswrapper[4611]: I0929 12:59:54.725762 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" event={"ID":"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6","Type":"ContainerStarted","Data":"ee23fd65176f8efb113ff9a9915d102fa65bebef2f750a911c0faad21a4570a4"}
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.770066 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-666b746458-tsmhw" event={"ID":"4831cfcd-d5b0-427b-8977-a6120aa09966","Type":"ContainerStarted","Data":"6bb6c96fb3568644c44a35022c823a692ebc02d386664360dffccf97aeebb165"}
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.771758 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-666b746458-tsmhw" event={"ID":"4831cfcd-d5b0-427b-8977-a6120aa09966","Type":"ContainerStarted","Data":"b129454ef4cb4f0c915cac9b668b958082da635a1afb8e2649ea113bccb23550"}
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.771925 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.772038 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.790920 4611 generic.go:334] "Generic (PLEG): container finished" podID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerID="79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9" exitCode=0
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.791129 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" event={"ID":"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6","Type":"ContainerDied","Data":"79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9"}
Sep 29 12:59:55 crc kubenswrapper[4611]: I0929 12:59:55.806762 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-666b746458-tsmhw" podStartSLOduration=2.8067384300000002 podStartE2EDuration="2.80673843s" podCreationTimestamp="2025-09-29 12:59:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:55.79390522 +0000 UTC m=+1182.685424836" watchObservedRunningTime="2025-09-29 12:59:55.80673843 +0000 UTC m=+1182.698258026"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.643004 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-646b84b57b-d2bbm"]
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.649493 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.655505 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.658315 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.680930 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-646b84b57b-d2bbm"]
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803499 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdbr6\" (UniqueName: \"kubernetes.io/projected/851aa640-9ae4-4045-a9ae-94ba85cc06da-kube-api-access-hdbr6\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803558 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-internal-tls-certs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803582 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-combined-ca-bundle\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803635 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aa640-9ae4-4045-a9ae-94ba85cc06da-logs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803671 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-config-data\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803722 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-public-tls-certs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.803756 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-config-data-custom\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.825264 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" event={"ID":"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6","Type":"ContainerStarted","Data":"aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5"}
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.825610 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.851870 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" podStartSLOduration=3.851852757 podStartE2EDuration="3.851852757s" podCreationTimestamp="2025-09-29 12:59:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:56.851317032 +0000 UTC m=+1183.742836628" watchObservedRunningTime="2025-09-29 12:59:56.851852757 +0000 UTC m=+1183.743372363"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.905904 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aa640-9ae4-4045-a9ae-94ba85cc06da-logs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.905979 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-config-data\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.906116 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-public-tls-certs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.906175 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-config-data-custom\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.906243 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdbr6\" (UniqueName: \"kubernetes.io/projected/851aa640-9ae4-4045-a9ae-94ba85cc06da-kube-api-access-hdbr6\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.906297 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-internal-tls-certs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.906329 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-combined-ca-bundle\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.909618 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aa640-9ae4-4045-a9ae-94ba85cc06da-logs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.915702 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-combined-ca-bundle\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.921768 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-config-data-custom\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.922919 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-public-tls-certs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.927285 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-internal-tls-certs\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.940169 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdbr6\" (UniqueName: \"kubernetes.io/projected/851aa640-9ae4-4045-a9ae-94ba85cc06da-kube-api-access-hdbr6\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:56 crc kubenswrapper[4611]: I0929 12:59:56.955684 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aa640-9ae4-4045-a9ae-94ba85cc06da-config-data\") pod \"barbican-api-646b84b57b-d2bbm\" (UID: \"851aa640-9ae4-4045-a9ae-94ba85cc06da\") " pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:57 crc kubenswrapper[4611]: I0929 12:59:57.005335 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:58 crc kubenswrapper[4611]: I0929 12:59:58.467638 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused"
Sep 29 12:59:58 crc kubenswrapper[4611]: I0929 12:59:58.563371 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused"
Sep 29 12:59:58 crc kubenswrapper[4611]: I0929 12:59:58.850097 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fbf8688df-k8w9k" event={"ID":"bd065838-835d-4f4d-aa6f-27cde218b024","Type":"ContainerStarted","Data":"d8c0a0b23138a43ce6bffe0676c95ca9b74b6eb30f2a685b5f8c143142ce39c1"}
Sep 29 12:59:58 crc kubenswrapper[4611]: I0929 12:59:58.862023 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm" event={"ID":"7675325b-f3c9-47e9-9992-bdc23d3a761f","Type":"ContainerStarted","Data":"4058f42b06ab7b6589924f3f5e79c16b2fcde149a41fcbf9628a66a7805e0aae"}
Sep 29 12:59:59 crc kubenswrapper[4611]: W0929 12:59:59.001917 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod851aa640_9ae4_4045_a9ae_94ba85cc06da.slice/crio-1e7ee7d0269d6ec2b0dd722008f30f388b7f99498f11b9f4bfadd8ead6dd10c2 WatchSource:0}: Error finding container 1e7ee7d0269d6ec2b0dd722008f30f388b7f99498f11b9f4bfadd8ead6dd10c2: Status 404 returned error can't find the container with id 1e7ee7d0269d6ec2b0dd722008f30f388b7f99498f11b9f4bfadd8ead6dd10c2
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.003553 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-646b84b57b-d2bbm"]
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.877298 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fbf8688df-k8w9k" event={"ID":"bd065838-835d-4f4d-aa6f-27cde218b024","Type":"ContainerStarted","Data":"b4abdafe120bcccc5622afde6965374ead62774a93f60b156e5e0439cb0c8d58"}
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.882710 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm" event={"ID":"7675325b-f3c9-47e9-9992-bdc23d3a761f","Type":"ContainerStarted","Data":"bce41e0c5576325883b8fe3b1d6862139b9fabd92141729c36dddb0e1f0f2e59"}
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.885455 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646b84b57b-d2bbm" event={"ID":"851aa640-9ae4-4045-a9ae-94ba85cc06da","Type":"ContainerStarted","Data":"23f9c9ebabadf2e0606acff4a8d8462376c43a0bafab43e96573d126694eb348"}
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.885602 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646b84b57b-d2bbm" event={"ID":"851aa640-9ae4-4045-a9ae-94ba85cc06da","Type":"ContainerStarted","Data":"b45b84218ceeae433d285c4518c82bad46c0da96c2b1d14de8512a2029f70563"}
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.885733 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646b84b57b-d2bbm" event={"ID":"851aa640-9ae4-4045-a9ae-94ba85cc06da","Type":"ContainerStarted","Data":"1e7ee7d0269d6ec2b0dd722008f30f388b7f99498f11b9f4bfadd8ead6dd10c2"}
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.886759 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.886897 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-646b84b57b-d2bbm"
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.904563 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-fbf8688df-k8w9k" podStartSLOduration=3.873191127 podStartE2EDuration="7.904544803s" podCreationTimestamp="2025-09-29 12:59:52 +0000 UTC" firstStartedPulling="2025-09-29 12:59:54.492601447 +0000 UTC m=+1181.384121043" lastFinishedPulling="2025-09-29 12:59:58.523955113 +0000 UTC m=+1185.415474719" observedRunningTime="2025-09-29 12:59:59.900739054 +0000 UTC m=+1186.792258680" watchObservedRunningTime="2025-09-29 12:59:59.904544803 +0000 UTC m=+1186.796064409"
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.936045 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7447bc4958-7qrwm" podStartSLOduration=3.892974648 podStartE2EDuration="7.936021791s" podCreationTimestamp="2025-09-29 12:59:52 +0000 UTC" firstStartedPulling="2025-09-29 12:59:54.482022492 +0000 UTC m=+1181.373542098" lastFinishedPulling="2025-09-29 12:59:58.525069635 +0000 UTC m=+1185.416589241" observedRunningTime="2025-09-29 12:59:59.921192224 +0000 UTC m=+1186.812711850" watchObservedRunningTime="2025-09-29 12:59:59.936021791 +0000 UTC m=+1186.827541397"
Sep 29 12:59:59 crc kubenswrapper[4611]: I0929 12:59:59.959969 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-646b84b57b-d2bbm" podStartSLOduration=3.9599435610000002 podStartE2EDuration="3.959943561s" podCreationTimestamp="2025-09-29 12:59:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 12:59:59.951814437 +0000 UTC m=+1186.843334053" watchObservedRunningTime="2025-09-29 12:59:59.959943561 +0000 UTC m=+1186.851463177"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.165004 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"]
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.166929 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.169099 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.169535 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.187541 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"]
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.285710 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2335d26b-bfa6-4d00-b9a1-a6ed61250684-secret-volume\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.286131 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2rz7\" (UniqueName: \"kubernetes.io/projected/2335d26b-bfa6-4d00-b9a1-a6ed61250684-kube-api-access-j2rz7\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.286220 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2335d26b-bfa6-4d00-b9a1-a6ed61250684-config-volume\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.387529 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2rz7\" (UniqueName: \"kubernetes.io/projected/2335d26b-bfa6-4d00-b9a1-a6ed61250684-kube-api-access-j2rz7\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.387708 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2335d26b-bfa6-4d00-b9a1-a6ed61250684-config-volume\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.387799 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2335d26b-bfa6-4d00-b9a1-a6ed61250684-secret-volume\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.389034 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2335d26b-bfa6-4d00-b9a1-a6ed61250684-config-volume\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.406867 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2335d26b-bfa6-4d00-b9a1-a6ed61250684-secret-volume\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.409750 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2rz7\" (UniqueName: \"kubernetes.io/projected/2335d26b-bfa6-4d00-b9a1-a6ed61250684-kube-api-access-j2rz7\") pod \"collect-profiles-29319180-v6tzk\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:00 crc kubenswrapper[4611]: I0929 13:00:00.515938 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"
Sep 29 13:00:02 crc kubenswrapper[4611]: I0929 13:00:02.940825 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-668bd746bd-wbxzt"
Sep 29 13:00:03 crc kubenswrapper[4611]: I0929 13:00:03.469847 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5"
Sep 29 13:00:03 crc kubenswrapper[4611]: I0929 13:00:03.541394 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5db5f45875-vs8fl"]
Sep 29 13:00:03 crc kubenswrapper[4611]: I0929 13:00:03.542023 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="dnsmasq-dns" containerID="cri-o://99ce548ec2224936914a5fd77a487bfb296306c9755e9439626519d4eed51f39" gracePeriod=10
Sep 29 13:00:03 crc kubenswrapper[4611]: I0929 13:00:03.963237 4611 generic.go:334] "Generic (PLEG): container finished" podID="012ad4e9-ec08-45d6-80c8-db61653af044" containerID="99ce548ec2224936914a5fd77a487bfb296306c9755e9439626519d4eed51f39" exitCode=0
Sep 29 13:00:03 crc kubenswrapper[4611]: I0929 13:00:03.963420 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" event={"ID":"012ad4e9-ec08-45d6-80c8-db61653af044","Type":"ContainerDied","Data":"99ce548ec2224936914a5fd77a487bfb296306c9755e9439626519d4eed51f39"}
Sep 29 13:00:04 crc kubenswrapper[4611]: I0929 13:00:04.628647 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:00:04 crc kubenswrapper[4611]: I0929 13:00:04.628697 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:00:04 crc kubenswrapper[4611]: I0929 13:00:04.987909 4611 generic.go:334] "Generic (PLEG): container finished" podID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" containerID="de5ed9e3669155a1741ee806c28b219f73255f7bca033bc129a01ef986b8f519" exitCode=0
Sep 29 13:00:04 crc kubenswrapper[4611]: I0929 13:00:04.987945 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nmkf9" event={"ID":"578c0d20-ef6e-43d0-8290-1ec794d9a0ee","Type":"ContainerDied","Data":"de5ed9e3669155a1741ee806c28b219f73255f7bca033bc129a01ef986b8f519"}
Sep 29 13:00:05 crc kubenswrapper[4611]: I0929 13:00:05.640806 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-697f494f47-d2wfq"
Sep 29 13:00:05 crc kubenswrapper[4611]: I0929 13:00:05.722259 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-668bd746bd-wbxzt"]
Sep 29 13:00:05 crc kubenswrapper[4611]: I0929 13:00:05.722506 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-668bd746bd-wbxzt" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-api" containerID="cri-o://7d5064836c84d7385443320df67675a8be13b21da8d1dad74c305ec1ea9338c9" gracePeriod=30
Sep 29 13:00:05 crc kubenswrapper[4611]: I0929 13:00:05.723010 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-668bd746bd-wbxzt" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-httpd" containerID="cri-o://614014d53528961e58f380bbac47797f4939d92d8e5ecd17843ea81902156172" gracePeriod=30
Sep 29 13:00:06 crc kubenswrapper[4611]: I0929 13:00:06.038758 4611 generic.go:334] "Generic (PLEG): container finished" podID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerID="614014d53528961e58f380bbac47797f4939d92d8e5ecd17843ea81902156172" exitCode=0
Sep 29 13:00:06 crc kubenswrapper[4611]: I0929 13:00:06.040138 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-668bd746bd-wbxzt" event={"ID":"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7","Type":"ContainerDied","Data":"614014d53528961e58f380bbac47797f4939d92d8e5ecd17843ea81902156172"}
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.523305 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nmkf9"
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.669385 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-db-sync-config-data\") pod \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") "
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.669475 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-config-data\") pod \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") "
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.669536 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wmdr\" (UniqueName: \"kubernetes.io/projected/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-kube-api-access-7wmdr\") pod \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") "
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.669657 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-scripts\") pod \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") "
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.669674 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-combined-ca-bundle\") pod \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") "
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.669772 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-etc-machine-id\") pod \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\" (UID: \"578c0d20-ef6e-43d0-8290-1ec794d9a0ee\") "
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.670322 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "578c0d20-ef6e-43d0-8290-1ec794d9a0ee" (UID: "578c0d20-ef6e-43d0-8290-1ec794d9a0ee"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.692689 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "578c0d20-ef6e-43d0-8290-1ec794d9a0ee" (UID: "578c0d20-ef6e-43d0-8290-1ec794d9a0ee"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.699885 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-scripts" (OuterVolumeSpecName: "scripts") pod "578c0d20-ef6e-43d0-8290-1ec794d9a0ee" (UID: "578c0d20-ef6e-43d0-8290-1ec794d9a0ee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.700104 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-kube-api-access-7wmdr" (OuterVolumeSpecName: "kube-api-access-7wmdr") pod "578c0d20-ef6e-43d0-8290-1ec794d9a0ee" (UID: "578c0d20-ef6e-43d0-8290-1ec794d9a0ee"). InnerVolumeSpecName "kube-api-access-7wmdr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.742730 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "578c0d20-ef6e-43d0-8290-1ec794d9a0ee" (UID: "578c0d20-ef6e-43d0-8290-1ec794d9a0ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.771853 4611 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-etc-machine-id\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.771884 4611 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.771894 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wmdr\" (UniqueName: \"kubernetes.io/projected/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-kube-api-access-7wmdr\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.771904 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.771912 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.836003 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.887807 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.920597 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-config-data" (OuterVolumeSpecName: "config-data") pod "578c0d20-ef6e-43d0-8290-1ec794d9a0ee" (UID: "578c0d20-ef6e-43d0-8290-1ec794d9a0ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:07 crc kubenswrapper[4611]: I0929 13:00:07.987512 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/578c0d20-ef6e-43d0-8290-1ec794d9a0ee-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.079711 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nmkf9" event={"ID":"578c0d20-ef6e-43d0-8290-1ec794d9a0ee","Type":"ContainerDied","Data":"6add29a6f8fd1d79d349f1d561daca67f72f71b8cb6a57b384a4c58048c829e5"}
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.079752 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6add29a6f8fd1d79d349f1d561daca67f72f71b8cb6a57b384a4c58048c829e5"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.079830 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nmkf9"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.864814 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.864839 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.988171 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 13:00:08 crc kubenswrapper[4611]: E0929 13:00:08.988640 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" containerName="cinder-db-sync"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.988662 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" containerName="cinder-db-sync"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.988871 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" containerName="cinder-db-sync"
Sep 29 13:00:08 crc kubenswrapper[4611]: I0929 13:00:08.997974 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.005742 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-64pb2"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.005960 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.005982 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.006070 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.057884 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.121929 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d77d963-760c-4b7b-82e6-51ea53e9daa6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.121985 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-scripts\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.122347 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.122430 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lrc4\" (UniqueName: \"kubernetes.io/projected/1d77d963-760c-4b7b-82e6-51ea53e9daa6-kube-api-access-9lrc4\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.122801 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.122865 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.166434 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65c7b5795c-4r8z6"]
Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.167986 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.189436 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.191366 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.195667 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.215527 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65c7b5795c-4r8z6"] Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.235843 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.235892 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.235921 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d77d963-760c-4b7b-82e6-51ea53e9daa6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.235950 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-scripts\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.236035 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.236070 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lrc4\" (UniqueName: \"kubernetes.io/projected/1d77d963-760c-4b7b-82e6-51ea53e9daa6-kube-api-access-9lrc4\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.240082 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d77d963-760c-4b7b-82e6-51ea53e9daa6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.262748 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data-custom\") pod 
\"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.263859 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.278391 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.295811 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.301615 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lrc4\" (UniqueName: \"kubernetes.io/projected/1d77d963-760c-4b7b-82e6-51ea53e9daa6-kube-api-access-9lrc4\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.303141 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-scripts\") pod \"cinder-scheduler-0\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.327070 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337184 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-nb\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337230 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbf4w\" (UniqueName: \"kubernetes.io/projected/179e93cd-b67b-4218-bf50-69fca7227443-kube-api-access-rbf4w\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337249 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data-custom\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337285 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-scripts\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337311 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337359 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-swift-storage-0\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337384 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/179e93cd-b67b-4218-bf50-69fca7227443-etc-machine-id\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337403 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337423 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/179e93cd-b67b-4218-bf50-69fca7227443-logs\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 
13:00:09.337438 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-config\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337453 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-sb\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337471 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-svc\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.337523 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cddkz\" (UniqueName: \"kubernetes.io/projected/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-kube-api-access-cddkz\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438671 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbf4w\" (UniqueName: \"kubernetes.io/projected/179e93cd-b67b-4218-bf50-69fca7227443-kube-api-access-rbf4w\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438706 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-nb\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438725 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data-custom\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438744 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-scripts\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438775 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438824 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" 
(UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-swift-storage-0\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438852 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/179e93cd-b67b-4218-bf50-69fca7227443-etc-machine-id\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438872 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438893 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/179e93cd-b67b-4218-bf50-69fca7227443-logs\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438909 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-config\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438923 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-sb\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438944 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-svc\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.438999 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cddkz\" (UniqueName: \"kubernetes.io/projected/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-kube-api-access-cddkz\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.442817 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/179e93cd-b67b-4218-bf50-69fca7227443-etc-machine-id\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.443686 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-nb\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " 
pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.444845 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-config\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.446047 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/179e93cd-b67b-4218-bf50-69fca7227443-logs\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.451411 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-svc\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.453827 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-sb\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.457696 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-swift-storage-0\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.458257 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.459211 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data-custom\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.480221 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbf4w\" (UniqueName: \"kubernetes.io/projected/179e93cd-b67b-4218-bf50-69fca7227443-kube-api-access-rbf4w\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.482287 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-scripts\") pod \"cinder-api-0\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.486532 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data\") pod \"cinder-api-0\" (UID: 
\"179e93cd-b67b-4218-bf50-69fca7227443\") " pod="openstack/cinder-api-0" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.491919 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cddkz\" (UniqueName: \"kubernetes.io/projected/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-kube-api-access-cddkz\") pod \"dnsmasq-dns-65c7b5795c-4r8z6\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") " pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.557664 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:09 crc kubenswrapper[4611]: I0929 13:00:09.585957 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.056893 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-646b84b57b-d2bbm" podUID="851aa640-9ae4-4045-a9ae-94ba85cc06da" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.057025 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-646b84b57b-d2bbm" podUID="851aa640-9ae4-4045-a9ae-94ba85cc06da" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.632813 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-646b84b57b-d2bbm" Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.797410 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.854016 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-646b84b57b-d2bbm" Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.922441 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-swift-storage-0\") pod \"012ad4e9-ec08-45d6-80c8-db61653af044\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.922488 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-config\") pod \"012ad4e9-ec08-45d6-80c8-db61653af044\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.922557 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ss57\" (UniqueName: \"kubernetes.io/projected/012ad4e9-ec08-45d6-80c8-db61653af044-kube-api-access-5ss57\") pod \"012ad4e9-ec08-45d6-80c8-db61653af044\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.922612 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-nb\") pod \"012ad4e9-ec08-45d6-80c8-db61653af044\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.922695 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-svc\") pod \"012ad4e9-ec08-45d6-80c8-db61653af044\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.922731 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-sb\") pod \"012ad4e9-ec08-45d6-80c8-db61653af044\" (UID: \"012ad4e9-ec08-45d6-80c8-db61653af044\") " Sep 29 13:00:11 crc kubenswrapper[4611]: I0929 13:00:11.994990 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/012ad4e9-ec08-45d6-80c8-db61653af044-kube-api-access-5ss57" (OuterVolumeSpecName: "kube-api-access-5ss57") pod "012ad4e9-ec08-45d6-80c8-db61653af044" (UID: "012ad4e9-ec08-45d6-80c8-db61653af044"). InnerVolumeSpecName "kube-api-access-5ss57". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.033843 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ss57\" (UniqueName: \"kubernetes.io/projected/012ad4e9-ec08-45d6-80c8-db61653af044-kube-api-access-5ss57\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.036654 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-666b746458-tsmhw"] Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.036908 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" containerID="cri-o://b129454ef4cb4f0c915cac9b668b958082da635a1afb8e2649ea113bccb23550" gracePeriod=30 Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.037367 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" containerID="cri-o://6bb6c96fb3568644c44a35022c823a692ebc02d386664360dffccf97aeebb165" gracePeriod=30 Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.091088 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": EOF" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.093782 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": EOF" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.125605 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": EOF" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.126153 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": EOF" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.128890 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "012ad4e9-ec08-45d6-80c8-db61653af044" (UID: "012ad4e9-ec08-45d6-80c8-db61653af044"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.146252 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.234572 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "012ad4e9-ec08-45d6-80c8-db61653af044" (UID: "012ad4e9-ec08-45d6-80c8-db61653af044"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.252665 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.262519 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "012ad4e9-ec08-45d6-80c8-db61653af044" (UID: "012ad4e9-ec08-45d6-80c8-db61653af044"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.280546 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-config" (OuterVolumeSpecName: "config") pod "012ad4e9-ec08-45d6-80c8-db61653af044" (UID: "012ad4e9-ec08-45d6-80c8-db61653af044"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.299519 4611 generic.go:334] "Generic (PLEG): container finished" podID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerID="b129454ef4cb4f0c915cac9b668b958082da635a1afb8e2649ea113bccb23550" exitCode=143 Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.299646 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-666b746458-tsmhw" event={"ID":"4831cfcd-d5b0-427b-8977-a6120aa09966","Type":"ContainerDied","Data":"b129454ef4cb4f0c915cac9b668b958082da635a1afb8e2649ea113bccb23550"} Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.312322 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "012ad4e9-ec08-45d6-80c8-db61653af044" (UID: "012ad4e9-ec08-45d6-80c8-db61653af044"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.319477 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.319823 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" event={"ID":"012ad4e9-ec08-45d6-80c8-db61653af044","Type":"ContainerDied","Data":"d1e8f6cca58c6d88bdc9f9d1a607f7bf5cfa1ca0f9d32db0bc9d14632eabab82"} Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.319915 4611 scope.go:117] "RemoveContainer" containerID="99ce548ec2224936914a5fd77a487bfb296306c9755e9439626519d4eed51f39" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.362756 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.362786 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.362795 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/012ad4e9-ec08-45d6-80c8-db61653af044-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.399571 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.408431 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5db5f45875-vs8fl"] Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.415579 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5db5f45875-vs8fl"] Sep 29 13:00:12 crc kubenswrapper[4611]: E0929 13:00:12.427207 4611 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod012ad4e9_ec08_45d6_80c8_db61653af044.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4831cfcd_d5b0_427b_8977_a6120aa09966.slice/crio-conmon-b129454ef4cb4f0c915cac9b668b958082da635a1afb8e2649ea113bccb23550.scope\": RecentStats: unable to find data in memory cache]" Sep 29 13:00:12 crc kubenswrapper[4611]: I0929 13:00:12.888981 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5db5f45875-vs8fl" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.157:5353: i/o timeout" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.346985 4611 generic.go:334] "Generic (PLEG): container finished" podID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerID="7d5064836c84d7385443320df67675a8be13b21da8d1dad74c305ec1ea9338c9" exitCode=0 Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.347272 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-668bd746bd-wbxzt" event={"ID":"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7","Type":"ContainerDied","Data":"7d5064836c84d7385443320df67675a8be13b21da8d1dad74c305ec1ea9338c9"} Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.471820 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get 
\"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.471913 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.472778 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"d6fee3b5d5de62a3b7b8286065e555d624605989eba216898d5cc83f4d5aa788"} pod="openstack/horizon-5c9489c674-t4fp8" containerMessage="Container horizon failed startup probe, will be restarted" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.472816 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" containerID="cri-o://d6fee3b5d5de62a3b7b8286065e555d624605989eba216898d5cc83f4d5aa788" gracePeriod=30 Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.568018 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.568119 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.569019 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"d741cd0ee1243c3bc0d72e207169fa52be409271801e3754c56404673f372a6e"} pod="openstack/horizon-6f7ffcb4c4-cz4zj" containerMessage="Container horizon failed startup probe, will be restarted" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.569062 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" containerID="cri-o://d741cd0ee1243c3bc0d72e207169fa52be409271801e3754c56404673f372a6e" gracePeriod=30 Sep 29 13:00:13 crc kubenswrapper[4611]: E0929 13:00:13.576308 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Sep 29 13:00:13 crc kubenswrapper[4611]: E0929 13:00:13.576590 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sdb7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(62331da1-a2da-4934-b0bd-8cee7d29bdfb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 13:00:13 crc kubenswrapper[4611]: E0929 13:00:13.577818 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.670404 4611 scope.go:117] "RemoveContainer" containerID="93a4d0c596c05a59b0dc6ed24ddb36557d0e89be817684de3d920da49315aa7a" Sep 29 13:00:13 crc kubenswrapper[4611]: I0929 13:00:13.804007 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" 
path="/var/lib/kubelet/pods/012ad4e9-ec08-45d6-80c8-db61653af044/volumes" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.275814 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"] Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.507574 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" event={"ID":"2335d26b-bfa6-4d00-b9a1-a6ed61250684","Type":"ContainerStarted","Data":"70685e156f3d41ccc2cc271f5b9a56ce13cde2e76859fa96703ad8b411888c4d"} Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.521164 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-central-agent" containerID="cri-o://245cf191c2b0db624f95eaecf3ce3924c26650437e96f0b9ceaa53f75873dcd1" gracePeriod=30 Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.521794 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="sg-core" containerID="cri-o://1e3bef10a9d8fb2819a441ef364c47bf65035e558208a0996d9977db26e60250" gracePeriod=30 Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.521861 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-notification-agent" containerID="cri-o://50c6d6fbcacbc473633626cefd5f06aca330aed877d600da176b9c172dd4b32a" gracePeriod=30 Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.677710 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.688232 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.766893 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.775837 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-combined-ca-bundle\") pod \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.775898 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bjjm\" (UniqueName: \"kubernetes.io/projected/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-kube-api-access-6bjjm\") pod \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.775944 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-ovndb-tls-certs\") pod \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.776001 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-httpd-config\") pod \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.776091 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-config\") pod \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\" (UID: \"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7\") " Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.797571 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" (UID: "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.801224 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-kube-api-access-6bjjm" (OuterVolumeSpecName: "kube-api-access-6bjjm") pod "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" (UID: "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7"). InnerVolumeSpecName "kube-api-access-6bjjm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.883136 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bjjm\" (UniqueName: \"kubernetes.io/projected/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-kube-api-access-6bjjm\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.883176 4611 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.964844 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-config" (OuterVolumeSpecName: "config") pod "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" (UID: "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:14 crc kubenswrapper[4611]: I0929 13:00:14.987318 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.061058 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65c7b5795c-4r8z6"] Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.090185 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" (UID: "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.189864 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" (UID: "20f6f2d8-69ae-4e63-8aa3-08a007eba4e7"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.192774 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.192804 4611 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.584392 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"179e93cd-b67b-4218-bf50-69fca7227443","Type":"ContainerStarted","Data":"223c239df6a30e48b5f311ca5dd66a6f689bd7b1497ec2b0b108abd12067a11f"} Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.619914 4611 generic.go:334] "Generic (PLEG): container finished" podID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerID="1e3bef10a9d8fb2819a441ef364c47bf65035e558208a0996d9977db26e60250" exitCode=2 Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.620000 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerDied","Data":"1e3bef10a9d8fb2819a441ef364c47bf65035e558208a0996d9977db26e60250"} Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.656967 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-668bd746bd-wbxzt" event={"ID":"20f6f2d8-69ae-4e63-8aa3-08a007eba4e7","Type":"ContainerDied","Data":"75d844b2fe35c88917989b8635f0942bfa9072f3c15df8485970f953bf0ddda7"} Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.657029 4611 scope.go:117] "RemoveContainer" containerID="614014d53528961e58f380bbac47797f4939d92d8e5ecd17843ea81902156172" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.657262 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-668bd746bd-wbxzt" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.676430 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" event={"ID":"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2","Type":"ContainerStarted","Data":"44a6e699b19f95f403399ba0ba9d46a51baa6d4e878b0cd1c1817ea5a4d91401"} Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.690946 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1d77d963-760c-4b7b-82e6-51ea53e9daa6","Type":"ContainerStarted","Data":"1dfd736215bc09375c02d2768d37d67344b63f7e48daab6ae6c454f6ea514707"} Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.719879 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" event={"ID":"2335d26b-bfa6-4d00-b9a1-a6ed61250684","Type":"ContainerStarted","Data":"70958e3a5a488608a2d03152b5a933319323321e0dfb317c1f329e7d77342323"} Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.761774 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" podStartSLOduration=15.761748188 podStartE2EDuration="15.761748188s" podCreationTimestamp="2025-09-29 13:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:15.747664422 +0000 UTC m=+1202.639184028" watchObservedRunningTime="2025-09-29 13:00:15.761748188 +0000 UTC m=+1202.653267794" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.788005 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-668bd746bd-wbxzt"] Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.812777 4611 scope.go:117] "RemoveContainer" containerID="7d5064836c84d7385443320df67675a8be13b21da8d1dad74c305ec1ea9338c9" Sep 29 13:00:15 crc kubenswrapper[4611]: I0929 13:00:15.816188 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-668bd746bd-wbxzt"] Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.067075 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-646b84b57b-d2bbm" podUID="851aa640-9ae4-4045-a9ae-94ba85cc06da" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.067419 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-646b84b57b-d2bbm" podUID="851aa640-9ae4-4045-a9ae-94ba85cc06da" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.746258 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"179e93cd-b67b-4218-bf50-69fca7227443","Type":"ContainerStarted","Data":"7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6"} Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.752243 4611 generic.go:334] "Generic (PLEG): container finished" podID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerID="245cf191c2b0db624f95eaecf3ce3924c26650437e96f0b9ceaa53f75873dcd1" exitCode=0 Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.752542 4611 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerDied","Data":"245cf191c2b0db624f95eaecf3ce3924c26650437e96f0b9ceaa53f75873dcd1"} Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.775297 4611 generic.go:334] "Generic (PLEG): container finished" podID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerID="1e7c474a3b8a6754c41a0e4dab3c9c847c65125b8cb0a875e9c712b8d679227a" exitCode=0 Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.775590 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" event={"ID":"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2","Type":"ContainerDied","Data":"1e7c474a3b8a6754c41a0e4dab3c9c847c65125b8cb0a875e9c712b8d679227a"} Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.793053 4611 generic.go:334] "Generic (PLEG): container finished" podID="2335d26b-bfa6-4d00-b9a1-a6ed61250684" containerID="70958e3a5a488608a2d03152b5a933319323321e0dfb317c1f329e7d77342323" exitCode=0 Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.793236 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" event={"ID":"2335d26b-bfa6-4d00-b9a1-a6ed61250684","Type":"ContainerDied","Data":"70958e3a5a488608a2d03152b5a933319323321e0dfb317c1f329e7d77342323"} Sep 29 13:00:16 crc kubenswrapper[4611]: I0929 13:00:16.865742 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-646b84b57b-d2bbm" podUID="851aa640-9ae4-4045-a9ae-94ba85cc06da" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.209850 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.210392 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.750425 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" path="/var/lib/kubelet/pods/20f6f2d8-69ae-4e63-8aa3-08a007eba4e7/volumes" Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.809600 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" event={"ID":"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2","Type":"ContainerStarted","Data":"6e001040663132fa6d98e1547c84d37b6fca049464c8bab2177e1d60f78b2508"} Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.809844 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.813371 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"1d77d963-760c-4b7b-82e6-51ea53e9daa6","Type":"ContainerStarted","Data":"b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87"} Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.819180 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api-log" containerID="cri-o://7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6" gracePeriod=30 Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.819062 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"179e93cd-b67b-4218-bf50-69fca7227443","Type":"ContainerStarted","Data":"16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d"} Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.819472 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api" containerID="cri-o://16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d" gracePeriod=30 Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.838659 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" podStartSLOduration=8.838643176 podStartE2EDuration="8.838643176s" podCreationTimestamp="2025-09-29 13:00:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:17.836244347 +0000 UTC m=+1204.727763963" watchObservedRunningTime="2025-09-29 13:00:17.838643176 +0000 UTC m=+1204.730162782" Sep 29 13:00:17 crc kubenswrapper[4611]: I0929 13:00:17.869286 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=8.869267739 podStartE2EDuration="8.869267739s" podCreationTimestamp="2025-09-29 13:00:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:17.867259681 +0000 UTC m=+1204.758779297" watchObservedRunningTime="2025-09-29 13:00:17.869267739 +0000 UTC m=+1204.760787345" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.366428 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.499379 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2rz7\" (UniqueName: \"kubernetes.io/projected/2335d26b-bfa6-4d00-b9a1-a6ed61250684-kube-api-access-j2rz7\") pod \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.499542 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2335d26b-bfa6-4d00-b9a1-a6ed61250684-secret-volume\") pod \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.499586 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2335d26b-bfa6-4d00-b9a1-a6ed61250684-config-volume\") pod \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\" (UID: \"2335d26b-bfa6-4d00-b9a1-a6ed61250684\") " Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.513732 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2335d26b-bfa6-4d00-b9a1-a6ed61250684-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2335d26b-bfa6-4d00-b9a1-a6ed61250684" (UID: "2335d26b-bfa6-4d00-b9a1-a6ed61250684"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.514114 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2335d26b-bfa6-4d00-b9a1-a6ed61250684-kube-api-access-j2rz7" (OuterVolumeSpecName: "kube-api-access-j2rz7") pod "2335d26b-bfa6-4d00-b9a1-a6ed61250684" (UID: "2335d26b-bfa6-4d00-b9a1-a6ed61250684"). InnerVolumeSpecName "kube-api-access-j2rz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.514761 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2335d26b-bfa6-4d00-b9a1-a6ed61250684-config-volume" (OuterVolumeSpecName: "config-volume") pod "2335d26b-bfa6-4d00-b9a1-a6ed61250684" (UID: "2335d26b-bfa6-4d00-b9a1-a6ed61250684"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.601300 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2335d26b-bfa6-4d00-b9a1-a6ed61250684-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.601333 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2335d26b-bfa6-4d00-b9a1-a6ed61250684-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.601345 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2rz7\" (UniqueName: \"kubernetes.io/projected/2335d26b-bfa6-4d00-b9a1-a6ed61250684-kube-api-access-j2rz7\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.917787 4611 generic.go:334] "Generic (PLEG): container finished" podID="179e93cd-b67b-4218-bf50-69fca7227443" containerID="16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d" exitCode=0 Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.917811 4611 generic.go:334] "Generic (PLEG): container finished" podID="179e93cd-b67b-4218-bf50-69fca7227443" containerID="7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6" exitCode=143 Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.917886 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"179e93cd-b67b-4218-bf50-69fca7227443","Type":"ContainerDied","Data":"16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d"} Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.917940 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"179e93cd-b67b-4218-bf50-69fca7227443","Type":"ContainerDied","Data":"7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6"} Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.922079 4611 generic.go:334] "Generic (PLEG): container finished" podID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerID="50c6d6fbcacbc473633626cefd5f06aca330aed877d600da176b9c172dd4b32a" exitCode=0 Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.922162 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerDied","Data":"50c6d6fbcacbc473633626cefd5f06aca330aed877d600da176b9c172dd4b32a"} Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.932017 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1d77d963-760c-4b7b-82e6-51ea53e9daa6","Type":"ContainerStarted","Data":"5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7"} Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.949860 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.953361 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk" event={"ID":"2335d26b-bfa6-4d00-b9a1-a6ed61250684","Type":"ContainerDied","Data":"70685e156f3d41ccc2cc271f5b9a56ce13cde2e76859fa96703ad8b411888c4d"} Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.953436 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70685e156f3d41ccc2cc271f5b9a56ce13cde2e76859fa96703ad8b411888c4d" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.971199 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=9.893933891 podStartE2EDuration="10.971180324s" podCreationTimestamp="2025-09-29 13:00:08 +0000 UTC" firstStartedPulling="2025-09-29 13:00:14.862634781 +0000 UTC m=+1201.754154377" lastFinishedPulling="2025-09-29 13:00:15.939881214 +0000 UTC m=+1202.831400810" observedRunningTime="2025-09-29 13:00:18.964595954 +0000 UTC m=+1205.856115570" watchObservedRunningTime="2025-09-29 13:00:18.971180324 +0000 UTC m=+1205.862699930" Sep 29 13:00:18 crc kubenswrapper[4611]: I0929 13:00:18.982814 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.011775 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/179e93cd-b67b-4218-bf50-69fca7227443-logs\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.012064 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.012187 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/179e93cd-b67b-4218-bf50-69fca7227443-etc-machine-id\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.012350 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbf4w\" (UniqueName: \"kubernetes.io/projected/179e93cd-b67b-4218-bf50-69fca7227443-kube-api-access-rbf4w\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.012454 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data-custom\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.012585 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-scripts\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 
13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.012775 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-combined-ca-bundle\") pod \"179e93cd-b67b-4218-bf50-69fca7227443\" (UID: \"179e93cd-b67b-4218-bf50-69fca7227443\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.015223 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/179e93cd-b67b-4218-bf50-69fca7227443-logs" (OuterVolumeSpecName: "logs") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.015769 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/179e93cd-b67b-4218-bf50-69fca7227443-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.032277 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.042076 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/179e93cd-b67b-4218-bf50-69fca7227443-kube-api-access-rbf4w" (OuterVolumeSpecName: "kube-api-access-rbf4w") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "kube-api-access-rbf4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.045126 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-scripts" (OuterVolumeSpecName: "scripts") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.092987 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.121574 4611 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.121635 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.121648 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.121658 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/179e93cd-b67b-4218-bf50-69fca7227443-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.121669 4611 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/179e93cd-b67b-4218-bf50-69fca7227443-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.121680 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbf4w\" (UniqueName: \"kubernetes.io/projected/179e93cd-b67b-4218-bf50-69fca7227443-kube-api-access-rbf4w\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.143734 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data" (OuterVolumeSpecName: "config-data") pod "179e93cd-b67b-4218-bf50-69fca7227443" (UID: "179e93cd-b67b-4218-bf50-69fca7227443"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.216345 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.223264 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/179e93cd-b67b-4218-bf50-69fca7227443-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.324746 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-sg-core-conf-yaml\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325280 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-run-httpd\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325376 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-log-httpd\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325462 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-config-data\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325507 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-combined-ca-bundle\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325539 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-scripts\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325576 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdb7l\" (UniqueName: \"kubernetes.io/projected/62331da1-a2da-4934-b0bd-8cee7d29bdfb-kube-api-access-sdb7l\") pod \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\" (UID: \"62331da1-a2da-4934-b0bd-8cee7d29bdfb\") " Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325892 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.325919 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.328388 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.328433 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62331da1-a2da-4934-b0bd-8cee7d29bdfb-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.332061 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.342414 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62331da1-a2da-4934-b0bd-8cee7d29bdfb-kube-api-access-sdb7l" (OuterVolumeSpecName: "kube-api-access-sdb7l") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). InnerVolumeSpecName "kube-api-access-sdb7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.387191 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-scripts" (OuterVolumeSpecName: "scripts") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.391287 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.405523 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-config-data" (OuterVolumeSpecName: "config-data") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.417032 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62331da1-a2da-4934-b0bd-8cee7d29bdfb" (UID: "62331da1-a2da-4934-b0bd-8cee7d29bdfb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.429799 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.429844 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.429861 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.429874 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdb7l\" (UniqueName: \"kubernetes.io/projected/62331da1-a2da-4934-b0bd-8cee7d29bdfb-kube-api-access-sdb7l\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.429887 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62331da1-a2da-4934-b0bd-8cee7d29bdfb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:19 crc kubenswrapper[4611]: W0929 13:00:19.790693 4611 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-conmon-7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-conmon-7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6.scope: no such file or directory Sep 29 13:00:19 crc kubenswrapper[4611]: W0929 13:00:19.790753 4611 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6.scope: no such file or directory Sep 29 13:00:19 crc kubenswrapper[4611]: W0929 13:00:19.794269 4611 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-conmon-16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-conmon-16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d.scope: no such file or directory Sep 29 13:00:19 crc kubenswrapper[4611]: W0929 13:00:19.794317 4611 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): 
inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179e93cd_b67b_4218_bf50_69fca7227443.slice/crio-16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d.scope: no such file or directory Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.974717 4611 generic.go:334] "Generic (PLEG): container finished" podID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerID="d6fee3b5d5de62a3b7b8286065e555d624605989eba216898d5cc83f4d5aa788" exitCode=0 Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.974794 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerDied","Data":"d6fee3b5d5de62a3b7b8286065e555d624605989eba216898d5cc83f4d5aa788"} Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.980158 4611 generic.go:334] "Generic (PLEG): container finished" podID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerID="d741cd0ee1243c3bc0d72e207169fa52be409271801e3754c56404673f372a6e" exitCode=0 Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.980193 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerDied","Data":"d741cd0ee1243c3bc0d72e207169fa52be409271801e3754c56404673f372a6e"} Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.983141 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"179e93cd-b67b-4218-bf50-69fca7227443","Type":"ContainerDied","Data":"223c239df6a30e48b5f311ca5dd66a6f689bd7b1497ec2b0b108abd12067a11f"} Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.983190 4611 scope.go:117] "RemoveContainer" containerID="16ec50f3a0cdca06ef03ed9c9e24c6cedf1c8346fd2c3d4669156aff6071210d" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.983342 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.992399 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:19 crc kubenswrapper[4611]: I0929 13:00:19.993347 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62331da1-a2da-4934-b0bd-8cee7d29bdfb","Type":"ContainerDied","Data":"d86e34fe65a7eeda96147527181b93fbfa27fa2e76f7849a132328ccd9762c3b"} Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.024338 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.037899 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.041720 4611 scope.go:117] "RemoveContainer" containerID="7a0e9cc8bd72c758752f83eb11aa8c08dd2307e58d96c6ae7dc510c3e1c45ff6" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.071707 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072168 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2335d26b-bfa6-4d00-b9a1-a6ed61250684" containerName="collect-profiles" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072187 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2335d26b-bfa6-4d00-b9a1-a6ed61250684" containerName="collect-profiles" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072208 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="dnsmasq-dns" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072215 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="dnsmasq-dns" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072245 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api-log" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072253 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api-log" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072267 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-api" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072275 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-api" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072286 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072293 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072303 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-notification-agent" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072310 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-notification-agent" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072324 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="sg-core" Sep 
29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072330 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="sg-core" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072348 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-central-agent" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072355 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-central-agent" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072374 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="init" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072382 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="init" Sep 29 13:00:20 crc kubenswrapper[4611]: E0929 13:00:20.072399 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-httpd" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072407 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-httpd" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072611 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-central-agent" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072641 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2335d26b-bfa6-4d00-b9a1-a6ed61250684" containerName="collect-profiles" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072659 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="ceilometer-notification-agent" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072676 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api-log" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072685 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="012ad4e9-ec08-45d6-80c8-db61653af044" containerName="dnsmasq-dns" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072697 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-httpd" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072710 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="179e93cd-b67b-4218-bf50-69fca7227443" containerName="cinder-api" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072721 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="20f6f2d8-69ae-4e63-8aa3-08a007eba4e7" containerName="neutron-api" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.072736 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" containerName="sg-core" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.073918 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.078607 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.078833 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.079033 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.082388 4611 scope.go:117] "RemoveContainer" containerID="1e3bef10a9d8fb2819a441ef364c47bf65035e558208a0996d9977db26e60250" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.092822 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.155585 4611 scope.go:117] "RemoveContainer" containerID="50c6d6fbcacbc473633626cefd5f06aca330aed877d600da176b9c172dd4b32a" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.177247 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.206740 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.229360 4611 scope.go:117] "RemoveContainer" containerID="245cf191c2b0db624f95eaecf3ce3924c26650437e96f0b9ceaa53f75873dcd1" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.244614 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.245303 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-scripts\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.245498 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.246150 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c16aac44-b8d5-46c2-b439-9cda8aed610d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.246310 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-config-data\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.246380 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.246466 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-config-data-custom\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.246721 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52hhd\" (UniqueName: \"kubernetes.io/projected/c16aac44-b8d5-46c2-b439-9cda8aed610d-kube-api-access-52hhd\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.246830 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16aac44-b8d5-46c2-b439-9cda8aed610d-logs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.244674 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.263040 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.265353 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.265566 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.276978 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.348689 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c16aac44-b8d5-46c2-b439-9cda8aed610d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.348767 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.348779 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c16aac44-b8d5-46c2-b439-9cda8aed610d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.348785 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-config-data\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " 
pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.349154 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-config-data-custom\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.349278 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52hhd\" (UniqueName: \"kubernetes.io/projected/c16aac44-b8d5-46c2-b439-9cda8aed610d-kube-api-access-52hhd\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.349357 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16aac44-b8d5-46c2-b439-9cda8aed610d-logs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.349447 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.349540 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-scripts\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.349685 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.350051 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16aac44-b8d5-46c2-b439-9cda8aed610d-logs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.357398 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.357559 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-config-data-custom\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.363080 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-scripts\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc 
kubenswrapper[4611]: I0929 13:00:20.365296 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.367747 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.369265 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16aac44-b8d5-46c2-b439-9cda8aed610d-config-data\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.372200 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52hhd\" (UniqueName: \"kubernetes.io/projected/c16aac44-b8d5-46c2-b439-9cda8aed610d-kube-api-access-52hhd\") pod \"cinder-api-0\" (UID: \"c16aac44-b8d5-46c2-b439-9cda8aed610d\") " pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.398765 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451604 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-run-httpd\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451674 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngv9t\" (UniqueName: \"kubernetes.io/projected/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-kube-api-access-ngv9t\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451720 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451746 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-scripts\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451824 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-log-httpd\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451864 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.451898 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-config-data\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.466444 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": read tcp 10.217.0.2:60952->10.217.0.165:9311: read: connection reset by peer" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.467051 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": dial tcp 10.217.0.165:9311: connect: connection refused" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.471047 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": read tcp 10.217.0.2:60962->10.217.0.165:9311: read: connection reset by peer" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.471383 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-666b746458-tsmhw" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": dial tcp 10.217.0.165:9311: connect: connection refused" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.553352 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.553687 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-scripts\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.553864 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-log-httpd\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.553912 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.553952 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-config-data\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.554091 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-run-httpd\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.554116 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngv9t\" (UniqueName: \"kubernetes.io/projected/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-kube-api-access-ngv9t\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.555017 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-log-httpd\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.557961 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-run-httpd\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.564737 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.568385 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.568440 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-config-data\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.581933 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-scripts\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.601614 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngv9t\" (UniqueName: \"kubernetes.io/projected/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-kube-api-access-ngv9t\") pod \"ceilometer-0\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " 
pod="openstack/ceilometer-0" Sep 29 13:00:20 crc kubenswrapper[4611]: I0929 13:00:20.888260 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.002155 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.082234 4611 generic.go:334] "Generic (PLEG): container finished" podID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerID="6bb6c96fb3568644c44a35022c823a692ebc02d386664360dffccf97aeebb165" exitCode=0 Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.082341 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-666b746458-tsmhw" event={"ID":"4831cfcd-d5b0-427b-8977-a6120aa09966","Type":"ContainerDied","Data":"6bb6c96fb3568644c44a35022c823a692ebc02d386664360dffccf97aeebb165"} Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.129191 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerStarted","Data":"c6aa15f2e771bb555ed0a0d2d6eb265e41f78d80a168a4a2e6d43155dae5b0ea"} Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.151312 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerStarted","Data":"d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9"} Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.300100 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-666b746458-tsmhw" Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.474031 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67hkn\" (UniqueName: \"kubernetes.io/projected/4831cfcd-d5b0-427b-8977-a6120aa09966-kube-api-access-67hkn\") pod \"4831cfcd-d5b0-427b-8977-a6120aa09966\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.474129 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-combined-ca-bundle\") pod \"4831cfcd-d5b0-427b-8977-a6120aa09966\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.474222 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4831cfcd-d5b0-427b-8977-a6120aa09966-logs\") pod \"4831cfcd-d5b0-427b-8977-a6120aa09966\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.474272 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data\") pod \"4831cfcd-d5b0-427b-8977-a6120aa09966\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.474327 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data-custom\") pod \"4831cfcd-d5b0-427b-8977-a6120aa09966\" (UID: \"4831cfcd-d5b0-427b-8977-a6120aa09966\") " Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.476467 
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.483953 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4831cfcd-d5b0-427b-8977-a6120aa09966" (UID: "4831cfcd-d5b0-427b-8977-a6120aa09966"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.487992 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4831cfcd-d5b0-427b-8977-a6120aa09966-kube-api-access-67hkn" (OuterVolumeSpecName: "kube-api-access-67hkn") pod "4831cfcd-d5b0-427b-8977-a6120aa09966" (UID: "4831cfcd-d5b0-427b-8977-a6120aa09966"). InnerVolumeSpecName "kube-api-access-67hkn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.526848 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4831cfcd-d5b0-427b-8977-a6120aa09966" (UID: "4831cfcd-d5b0-427b-8977-a6120aa09966"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.577166 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data" (OuterVolumeSpecName: "config-data") pod "4831cfcd-d5b0-427b-8977-a6120aa09966" (UID: "4831cfcd-d5b0-427b-8977-a6120aa09966"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.579301 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4831cfcd-d5b0-427b-8977-a6120aa09966-logs\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.579336 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.579348 4611 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.579363 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67hkn\" (UniqueName: \"kubernetes.io/projected/4831cfcd-d5b0-427b-8977-a6120aa09966-kube-api-access-67hkn\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.579374 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4831cfcd-d5b0-427b-8977-a6120aa09966-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.864016 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="179e93cd-b67b-4218-bf50-69fca7227443" path="/var/lib/kubelet/pods/179e93cd-b67b-4218-bf50-69fca7227443/volumes"
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.865120 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62331da1-a2da-4934-b0bd-8cee7d29bdfb" path="/var/lib/kubelet/pods/62331da1-a2da-4934-b0bd-8cee7d29bdfb/volumes"
Sep 29 13:00:21 crc kubenswrapper[4611]: I0929 13:00:21.880787 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.181105 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c16aac44-b8d5-46c2-b439-9cda8aed610d","Type":"ContainerStarted","Data":"2b1f057d303ffd20515106ae5b654feed7e9310d82253e111e7e750f218ce32f"}
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.185708 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerStarted","Data":"569c5869e74e4e3b49d5e931f06b6c3ae04fd4c1ad77ead923af8296185cfd7e"}
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.192152 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-666b746458-tsmhw" event={"ID":"4831cfcd-d5b0-427b-8977-a6120aa09966","Type":"ContainerDied","Data":"db75040bc352a3bd50b22f8146bce30d5b34a72a6aceb4863ecb9c5934277fa0"}
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.192240 4611 scope.go:117] "RemoveContainer" containerID="6bb6c96fb3568644c44a35022c823a692ebc02d386664360dffccf97aeebb165"
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.192464 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-666b746458-tsmhw"
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.224271 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-666b746458-tsmhw"]
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.231981 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-666b746458-tsmhw"]
Sep 29 13:00:22 crc kubenswrapper[4611]: I0929 13:00:22.241653 4611 scope.go:117] "RemoveContainer" containerID="b129454ef4cb4f0c915cac9b668b958082da635a1afb8e2649ea113bccb23550"
Sep 29 13:00:23 crc kubenswrapper[4611]: I0929 13:00:23.310316 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c16aac44-b8d5-46c2-b439-9cda8aed610d","Type":"ContainerStarted","Data":"f3d21d837f93c9c5e8c82ac1b9f3c122fe5b6cd1e838ebbf6ec8df0f74ee901d"}
Sep 29 13:00:23 crc kubenswrapper[4611]: I0929 13:00:23.318226 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerStarted","Data":"dcf54d565f94165c1a3f1b0bcf2ac6f4610b015e68424f7a293e3e368bd52587"}
Sep 29 13:00:23 crc kubenswrapper[4611]: I0929 13:00:23.318267 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerStarted","Data":"4e561103ba5a5c2e5e5c8ba27ecccb3e2b5570641cfb513200f2bb90a8fb2fe8"}
Sep 29 13:00:23 crc kubenswrapper[4611]: I0929 13:00:23.513225 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-f475d4f88-9gwgs"
Sep 29 13:00:23 crc kubenswrapper[4611]: I0929 13:00:23.524233 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-f475d4f88-9gwgs"
Sep 29 13:00:23 crc kubenswrapper[4611]: I0929 13:00:23.748681 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" path="/var/lib/kubelet/pods/4831cfcd-d5b0-427b-8977-a6120aa09966/volumes"
Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.330706 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c16aac44-b8d5-46c2-b439-9cda8aed610d","Type":"ContainerStarted","Data":"59ed249f93d6d62bdab4c02eb8246ea0f0dd84eeaa226e52cabe114e52d90f39"}
Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.331080 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.334264 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerStarted","Data":"8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8"}
Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.358391 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.358372987 podStartE2EDuration="4.358372987s" podCreationTimestamp="2025-09-29 13:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:24.352841318 +0000 UTC m=+1211.244360924" watchObservedRunningTime="2025-09-29 13:00:24.358372987 +0000 UTC m=+1211.249892593"
Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.563976 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6"
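
[editor's note] The pod_startup_latency_tracker record above is internally consistent: podStartSLOduration=4.358372987 is exactly watchObservedRunningTime (13:00:24.358372987) minus podCreationTimestamp (13:00:20), and since both pull timestamps are the zero value, no pull time is subtracted. Checking the arithmetic with the values copied from the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-09-29 13:00:20 +0000 UTC")
	running, _ := time.Parse(layout, "2025-09-29 13:00:24.358372987 +0000 UTC")
	fmt.Println(running.Sub(created)) // prints 4.358372987s
}
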
pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.648446 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69bc85cf77-brbf5"] Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.648781 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerName="dnsmasq-dns" containerID="cri-o://aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5" gracePeriod=10 Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.666666 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 13:00:24 crc kubenswrapper[4611]: I0929 13:00:24.751391 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.245100 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.367617 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-sb\") pod \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.368801 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c5k6\" (UniqueName: \"kubernetes.io/projected/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-kube-api-access-2c5k6\") pod \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.369545 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-svc\") pod \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.369856 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-nb\") pod \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.370024 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-config\") pod \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.370124 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-swift-storage-0\") pod \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\" (UID: \"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6\") " Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.371775 4611 generic.go:334] "Generic (PLEG): container finished" podID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerID="aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5" exitCode=0 Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.371845 4611 util.go:48] "No ready 
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.371867 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" event={"ID":"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6","Type":"ContainerDied","Data":"aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5"}
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.371893 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69bc85cf77-brbf5" event={"ID":"2fc10b74-c3b2-4525-b1a2-dc4374ef96b6","Type":"ContainerDied","Data":"ee23fd65176f8efb113ff9a9915d102fa65bebef2f750a911c0faad21a4570a4"}
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.371909 4611 scope.go:117] "RemoveContainer" containerID="aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5"
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.376047 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-kube-api-access-2c5k6" (OuterVolumeSpecName: "kube-api-access-2c5k6") pod "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" (UID: "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6"). InnerVolumeSpecName "kube-api-access-2c5k6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.402125 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="cinder-scheduler" containerID="cri-o://b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87" gracePeriod=30
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.402908 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.403116 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="probe" containerID="cri-o://5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7" gracePeriod=30
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.450807 4611 scope.go:117] "RemoveContainer" containerID="79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9"
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.453376 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" (UID: "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.464904 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.3640116 podStartE2EDuration="5.464884265s" podCreationTimestamp="2025-09-29 13:00:20 +0000 UTC" firstStartedPulling="2025-09-29 13:00:21.80709639 +0000 UTC m=+1208.698615996" lastFinishedPulling="2025-09-29 13:00:24.907969055 +0000 UTC m=+1211.799488661" observedRunningTime="2025-09-29 13:00:25.429583287 +0000 UTC m=+1212.321102893" watchObservedRunningTime="2025-09-29 13:00:25.464884265 +0000 UTC m=+1212.356403871"
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.472822 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c5k6\" (UniqueName: \"kubernetes.io/projected/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-kube-api-access-2c5k6\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.472858 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.475220 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" (UID: "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.491210 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-config" (OuterVolumeSpecName: "config") pod "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" (UID: "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.509105 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" (UID: "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.511011 4611 scope.go:117] "RemoveContainer" containerID="aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5" Sep 29 13:00:25 crc kubenswrapper[4611]: E0929 13:00:25.511425 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5\": container with ID starting with aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5 not found: ID does not exist" containerID="aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.511463 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5"} err="failed to get container status \"aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5\": rpc error: code = NotFound desc = could not find container \"aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5\": container with ID starting with aca31de169b32c91a33ac995f0697233b650f43016e462697f4911f4ff54f3f5 not found: ID does not exist" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.511489 4611 scope.go:117] "RemoveContainer" containerID="79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9" Sep 29 13:00:25 crc kubenswrapper[4611]: E0929 13:00:25.511914 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9\": container with ID starting with 79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9 not found: ID does not exist" containerID="79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.511941 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9"} err="failed to get container status \"79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9\": rpc error: code = NotFound desc = could not find container \"79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9\": container with ID starting with 79dc3a8d28d6e9f1476d9818cc5dd9ac90861b56b221e4e6d15a222a29cfe0a9 not found: ID does not exist" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.535647 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" (UID: "2fc10b74-c3b2-4525-b1a2-dc4374ef96b6"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.574270 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.574477 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.574538 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.574607 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.702315 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69bc85cf77-brbf5"] Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.710645 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69bc85cf77-brbf5"] Sep 29 13:00:25 crc kubenswrapper[4611]: I0929 13:00:25.748998 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" path="/var/lib/kubelet/pods/2fc10b74-c3b2-4525-b1a2-dc4374ef96b6/volumes" Sep 29 13:00:26 crc kubenswrapper[4611]: I0929 13:00:26.411509 4611 generic.go:334] "Generic (PLEG): container finished" podID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerID="5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7" exitCode=0 Sep 29 13:00:26 crc kubenswrapper[4611]: I0929 13:00:26.411568 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1d77d963-760c-4b7b-82e6-51ea53e9daa6","Type":"ContainerDied","Data":"5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7"} Sep 29 13:00:26 crc kubenswrapper[4611]: I0929 13:00:26.416440 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerStarted","Data":"a55dc107d64a6e4d7996b582dd0578336753c797829655495e33960dabdb0652"} Sep 29 13:00:26 crc kubenswrapper[4611]: I0929 13:00:26.736940 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-84bd74d746-h92xg" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.335727 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.427467 4611 generic.go:334] "Generic (PLEG): container finished" podID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerID="b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87" exitCode=0 Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.427512 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.427534 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1d77d963-760c-4b7b-82e6-51ea53e9daa6","Type":"ContainerDied","Data":"b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87"} Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.428013 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1d77d963-760c-4b7b-82e6-51ea53e9daa6","Type":"ContainerDied","Data":"1dfd736215bc09375c02d2768d37d67344b63f7e48daab6ae6c454f6ea514707"} Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.428041 4611 scope.go:117] "RemoveContainer" containerID="5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.461430 4611 scope.go:117] "RemoveContainer" containerID="b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.488203 4611 scope.go:117] "RemoveContainer" containerID="5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.488835 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7\": container with ID starting with 5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7 not found: ID does not exist" containerID="5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.488984 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7"} err="failed to get container status \"5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7\": rpc error: code = NotFound desc = could not find container \"5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7\": container with ID starting with 5da4e4c43afcd32d2e1aee53f2c7dc0ecff95851c19ff68f4dc6cb315214d5a7 not found: ID does not exist" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.489078 4611 scope.go:117] "RemoveContainer" containerID="b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.489457 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87\": container with ID starting with b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87 not found: ID does not exist" containerID="b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.489489 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87"} err="failed to get container status \"b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87\": rpc error: code = NotFound desc = could not find container \"b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87\": container with ID starting with b1f7c7ed4bba630791b7c1f3e374a39caa52aa76dade76119ae81d8d2e7c8e87 not found: ID does not exist" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.513645 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-scripts\") pod \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") "
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.513854 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data\") pod \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") "
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.514235 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lrc4\" (UniqueName: \"kubernetes.io/projected/1d77d963-760c-4b7b-82e6-51ea53e9daa6-kube-api-access-9lrc4\") pod \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") "
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.514333 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d77d963-760c-4b7b-82e6-51ea53e9daa6-etc-machine-id\") pod \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") "
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.514392 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data-custom\") pod \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\" (UID: \"1d77d963-760c-4b7b-82e6-51ea53e9daa6\") "
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.514701 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d77d963-760c-4b7b-82e6-51ea53e9daa6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1d77d963-760c-4b7b-82e6-51ea53e9daa6" (UID: "1d77d963-760c-4b7b-82e6-51ea53e9daa6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.515061 4611 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d77d963-760c-4b7b-82e6-51ea53e9daa6-etc-machine-id\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.521569 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-scripts" (OuterVolumeSpecName: "scripts") pod "1d77d963-760c-4b7b-82e6-51ea53e9daa6" (UID: "1d77d963-760c-4b7b-82e6-51ea53e9daa6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.522756 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d77d963-760c-4b7b-82e6-51ea53e9daa6-kube-api-access-9lrc4" (OuterVolumeSpecName: "kube-api-access-9lrc4") pod "1d77d963-760c-4b7b-82e6-51ea53e9daa6" (UID: "1d77d963-760c-4b7b-82e6-51ea53e9daa6"). InnerVolumeSpecName "kube-api-access-9lrc4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.522756 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1d77d963-760c-4b7b-82e6-51ea53e9daa6" (UID: "1d77d963-760c-4b7b-82e6-51ea53e9daa6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.600988 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d77d963-760c-4b7b-82e6-51ea53e9daa6" (UID: "1d77d963-760c-4b7b-82e6-51ea53e9daa6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.615999 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lrc4\" (UniqueName: \"kubernetes.io/projected/1d77d963-760c-4b7b-82e6-51ea53e9daa6-kube-api-access-9lrc4\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.616030 4611 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.616038 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.616047 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.642303 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data" (OuterVolumeSpecName: "config-data") pod "1d77d963-760c-4b7b-82e6-51ea53e9daa6" (UID: "1d77d963-760c-4b7b-82e6-51ea53e9daa6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.717137 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d77d963-760c-4b7b-82e6-51ea53e9daa6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.809568 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.824765 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.858826 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.859239 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerName="dnsmasq-dns" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859254 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerName="dnsmasq-dns" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.859299 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859305 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.859317 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859323 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.859336 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="cinder-scheduler" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859342 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="cinder-scheduler" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.859353 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerName="init" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859359 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerName="init" Sep 29 13:00:27 crc kubenswrapper[4611]: E0929 13:00:27.859369 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="probe" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859375 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="probe" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859534 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859553 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="4831cfcd-d5b0-427b-8977-a6120aa09966" containerName="barbican-api-log" Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859580 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fc10b74-c3b2-4525-b1a2-dc4374ef96b6" containerName="dnsmasq-dns"
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.859591 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" containerName="cinder-scheduler"
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.860477 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.867722 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Sep 29 13:00:27 crc kubenswrapper[4611]: I0929 13:00:27.868280 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.023702 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.023759 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-config-data\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.023819 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-scripts\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.023838 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvwc4\" (UniqueName: \"kubernetes.io/projected/998e498c-f720-44eb-9a17-7c13a2dd5b70-kube-api-access-bvwc4\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.023866 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.023952 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/998e498c-f720-44eb-9a17-7c13a2dd5b70-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141006 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/998e498c-f720-44eb-9a17-7c13a2dd5b70-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141087 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141128 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-config-data\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141201 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-scripts\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141228 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvwc4\" (UniqueName: \"kubernetes.io/projected/998e498c-f720-44eb-9a17-7c13a2dd5b70-kube-api-access-bvwc4\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141229 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/998e498c-f720-44eb-9a17-7c13a2dd5b70-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.141265 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.156256 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-config-data\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.156616 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.162020 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-scripts\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.164119 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0"
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/998e498c-f720-44eb-9a17-7c13a2dd5b70-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.185208 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvwc4\" (UniqueName: \"kubernetes.io/projected/998e498c-f720-44eb-9a17-7c13a2dd5b70-kube-api-access-bvwc4\") pod \"cinder-scheduler-0\" (UID: \"998e498c-f720-44eb-9a17-7c13a2dd5b70\") " pod="openstack/cinder-scheduler-0" Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.466524 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.466578 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.477470 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.562423 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:00:28 crc kubenswrapper[4611]: I0929 13:00:28.562805 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:00:29 crc kubenswrapper[4611]: I0929 13:00:29.067772 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 13:00:29 crc kubenswrapper[4611]: I0929 13:00:29.453447 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"998e498c-f720-44eb-9a17-7c13a2dd5b70","Type":"ContainerStarted","Data":"f8f8ed927ea147a0abc9cd57046e1d312864002eafbff89600dcd1be70f34975"} Sep 29 13:00:29 crc kubenswrapper[4611]: I0929 13:00:29.776646 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d77d963-760c-4b7b-82e6-51ea53e9daa6" path="/var/lib/kubelet/pods/1d77d963-760c-4b7b-82e6-51ea53e9daa6/volumes" Sep 29 13:00:30 crc kubenswrapper[4611]: I0929 13:00:30.472959 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"998e498c-f720-44eb-9a17-7c13a2dd5b70","Type":"ContainerStarted","Data":"c4d488147d5274e4a29206cb38533165845d61eff4ec26cdc2bbdcc438c4eb75"} Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.484226 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"998e498c-f720-44eb-9a17-7c13a2dd5b70","Type":"ContainerStarted","Data":"4c3eac1daf10a820d8779b8dceb821cf3763e11b4eb744714864340c1911b89f"} Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.511837 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.511820292 podStartE2EDuration="4.511820292s" podCreationTimestamp="2025-09-29 13:00:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:31.505292534 +0000 UTC m=+1218.396812150" watchObservedRunningTime="2025-09-29 13:00:31.511820292 +0000 UTC m=+1218.403339898" Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.810695 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] 
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.812027 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.814471 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.814606 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-8sfxn"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.834370 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.870487 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.938728 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.938823 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld5nl\" (UniqueName: \"kubernetes.io/projected/900d519c-288a-4341-911f-e429cbddfd5b-kube-api-access-ld5nl\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.938892 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config-secret\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:31 crc kubenswrapper[4611]: I0929 13:00:31.938920 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.040678 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld5nl\" (UniqueName: \"kubernetes.io/projected/900d519c-288a-4341-911f-e429cbddfd5b-kube-api-access-ld5nl\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.040782 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config-secret\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.040822 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.040915 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.042835 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.049441 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.058471 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config-secret\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.071195 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld5nl\" (UniqueName: \"kubernetes.io/projected/900d519c-288a-4341-911f-e429cbddfd5b-kube-api-access-ld5nl\") pod \"openstackclient\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.155783 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 13:00:32 crc kubenswrapper[4611]: I0929 13:00:32.787527 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 29 13:00:33 crc kubenswrapper[4611]: I0929 13:00:33.480078 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Sep 29 13:00:33 crc kubenswrapper[4611]: I0929 13:00:33.534930 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"900d519c-288a-4341-911f-e429cbddfd5b","Type":"ContainerStarted","Data":"2f445c6d178fd0768d42ac3946c2709766761724a1028df5b6d749fa7ea6edca"}
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.405893 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="c16aac44-b8d5-46c2-b439-9cda8aed610d" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.171:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.628325 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.629488 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.629677 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq"
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.630444 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.630561 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78" gracePeriod=600
Sep 29 13:00:34 crc kubenswrapper[4611]: I0929 13:00:34.836721 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Sep 29 13:00:35 crc kubenswrapper[4611]: I0929 13:00:35.603767 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78" exitCode=0
Sep 29 13:00:35 crc kubenswrapper[4611]: I0929 13:00:35.603828 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78"}
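
[editor's note] Unlike the readiness failures earlier, which only mark endpoints unready, the machine-config-daemon liveness failure above is acted on immediately: the kubelet records "Container machine-config-daemon failed liveness probe, will be restarted", kills the container with the pod's 600s grace period, and the next records show the old container dying and its replacement starting. A sketch of the consecutive-failure loop behind that decision; the period and threshold here are illustrative assumptions, not values read from the pod spec:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// liveness polls url every period and calls restart after threshold
// consecutive failures, roughly what the kubelet's probe worker does.
func liveness(url string, period time.Duration, threshold int, restart func()) {
	client := &http.Client{Timeout: time.Second}
	failures := 0
	for {
		resp, err := client.Get(url)
		if err == nil {
			resp.Body.Close()
		}
		if err == nil && resp.StatusCode < 400 {
			failures = 0
		} else if failures++; failures >= threshold {
			restart()
			failures = 0
		}
		time.Sleep(period)
	}
}

func main() {
	// Endpoint copied from the machine-config-daemon probe line above.
	liveness("http://127.0.0.1:8798/health", 10*time.Second, 3, func() {
		fmt.Println("failed liveness probe, will be restarted")
	})
}
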
event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78"} Sep 29 13:00:35 crc kubenswrapper[4611]: I0929 13:00:35.604037 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"7942f9fe9e1c29e9bc5facddd920c692e815332925345c3fdce8ee2caca74f90"} Sep 29 13:00:35 crc kubenswrapper[4611]: I0929 13:00:35.604067 4611 scope.go:117] "RemoveContainer" containerID="5dabec6d4a1f56079556aa2416ffa504eeb7f0aa06b802b890dac62cf28cc40d" Sep 29 13:00:38 crc kubenswrapper[4611]: I0929 13:00:38.468140 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Sep 29 13:00:38 crc kubenswrapper[4611]: I0929 13:00:38.564272 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Sep 29 13:00:38 crc kubenswrapper[4611]: I0929 13:00:38.837598 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.593727 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-54fd444d4f-vmksq"] Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.596055 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.599499 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.599808 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.624611 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.629848 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf23ea05-4538-4fed-bb3d-07d009f400bd-run-httpd\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.629923 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf23ea05-4538-4fed-bb3d-07d009f400bd-log-httpd\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.629947 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-public-tls-certs\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.629971 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-config-data\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.629996 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf23ea05-4538-4fed-bb3d-07d009f400bd-etc-swift\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.630064 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-combined-ca-bundle\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.630092 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6stg\" (UniqueName: \"kubernetes.io/projected/cf23ea05-4538-4fed-bb3d-07d009f400bd-kube-api-access-v6stg\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.630112 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-internal-tls-certs\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.630930 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-54fd444d4f-vmksq"] Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.731122 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf23ea05-4538-4fed-bb3d-07d009f400bd-etc-swift\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.731461 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-combined-ca-bundle\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.731716 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6stg\" (UniqueName: \"kubernetes.io/projected/cf23ea05-4538-4fed-bb3d-07d009f400bd-kube-api-access-v6stg\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.731853 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-internal-tls-certs\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.731964 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf23ea05-4538-4fed-bb3d-07d009f400bd-run-httpd\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.732071 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf23ea05-4538-4fed-bb3d-07d009f400bd-log-httpd\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.732172 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-public-tls-certs\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.732275 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-config-data\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc 
kubenswrapper[4611]: I0929 13:00:40.733273 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf23ea05-4538-4fed-bb3d-07d009f400bd-log-httpd\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.733400 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf23ea05-4538-4fed-bb3d-07d009f400bd-run-httpd\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.740237 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf23ea05-4538-4fed-bb3d-07d009f400bd-etc-swift\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.737614 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-public-tls-certs\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.741488 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-combined-ca-bundle\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.746453 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-internal-tls-certs\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.746726 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf23ea05-4538-4fed-bb3d-07d009f400bd-config-data\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.760149 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6stg\" (UniqueName: \"kubernetes.io/projected/cf23ea05-4538-4fed-bb3d-07d009f400bd-kube-api-access-v6stg\") pod \"swift-proxy-54fd444d4f-vmksq\" (UID: \"cf23ea05-4538-4fed-bb3d-07d009f400bd\") " pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:40 crc kubenswrapper[4611]: I0929 13:00:40.919305 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.470510 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-fvkzh"] Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.478461 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.530250 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-fvkzh"] Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.558106 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9b6p\" (UniqueName: \"kubernetes.io/projected/9ede43e6-1802-4389-8a47-ae78b16d3144-kube-api-access-k9b6p\") pod \"nova-api-db-create-fvkzh\" (UID: \"9ede43e6-1802-4389-8a47-ae78b16d3144\") " pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.655426 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-655h7"] Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.656737 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.664370 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9b6p\" (UniqueName: \"kubernetes.io/projected/9ede43e6-1802-4389-8a47-ae78b16d3144-kube-api-access-k9b6p\") pod \"nova-api-db-create-fvkzh\" (UID: \"9ede43e6-1802-4389-8a47-ae78b16d3144\") " pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.734485 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9b6p\" (UniqueName: \"kubernetes.io/projected/9ede43e6-1802-4389-8a47-ae78b16d3144-kube-api-access-k9b6p\") pod \"nova-api-db-create-fvkzh\" (UID: \"9ede43e6-1802-4389-8a47-ae78b16d3144\") " pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.769838 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf45l\" (UniqueName: \"kubernetes.io/projected/abe4e13c-dc14-4649-896d-23ad11daedd5-kube-api-access-cf45l\") pod \"nova-cell0-db-create-655h7\" (UID: \"abe4e13c-dc14-4649-896d-23ad11daedd5\") " pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.778609 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-5mdd9"] Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.780056 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.806031 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-655h7"] Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.808021 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.873709 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9cds\" (UniqueName: \"kubernetes.io/projected/d0597e5b-7c6b-465d-b298-0f72aa28e514-kube-api-access-w9cds\") pod \"nova-cell1-db-create-5mdd9\" (UID: \"d0597e5b-7c6b-465d-b298-0f72aa28e514\") " pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.873873 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf45l\" (UniqueName: \"kubernetes.io/projected/abe4e13c-dc14-4649-896d-23ad11daedd5-kube-api-access-cf45l\") pod \"nova-cell0-db-create-655h7\" (UID: \"abe4e13c-dc14-4649-896d-23ad11daedd5\") " pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.921381 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf45l\" (UniqueName: \"kubernetes.io/projected/abe4e13c-dc14-4649-896d-23ad11daedd5-kube-api-access-cf45l\") pod \"nova-cell0-db-create-655h7\" (UID: \"abe4e13c-dc14-4649-896d-23ad11daedd5\") " pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.933032 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5mdd9"] Sep 29 13:00:41 crc kubenswrapper[4611]: I0929 13:00:41.977605 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9cds\" (UniqueName: \"kubernetes.io/projected/d0597e5b-7c6b-465d-b298-0f72aa28e514-kube-api-access-w9cds\") pod \"nova-cell1-db-create-5mdd9\" (UID: \"d0597e5b-7c6b-465d-b298-0f72aa28e514\") " pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.018337 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9cds\" (UniqueName: \"kubernetes.io/projected/d0597e5b-7c6b-465d-b298-0f72aa28e514-kube-api-access-w9cds\") pod \"nova-cell1-db-create-5mdd9\" (UID: \"d0597e5b-7c6b-465d-b298-0f72aa28e514\") " pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.026968 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.118054 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.167246 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-54fd444d4f-vmksq"] Sep 29 13:00:42 crc kubenswrapper[4611]: W0929 13:00:42.214965 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf23ea05_4538_4fed_bb3d_07d009f400bd.slice/crio-dbcebc91b90601b9adf418396f7cebed9c999af5b6f8c8d96b36921b2902bc42 WatchSource:0}: Error finding container dbcebc91b90601b9adf418396f7cebed9c999af5b6f8c8d96b36921b2902bc42: Status 404 returned error can't find the container with id dbcebc91b90601b9adf418396f7cebed9c999af5b6f8c8d96b36921b2902bc42 Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.557507 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-655h7"] Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.630425 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-fvkzh"] Sep 29 13:00:42 crc kubenswrapper[4611]: W0929 13:00:42.638765 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ede43e6_1802_4389_8a47_ae78b16d3144.slice/crio-3931ad46a0a49907d347388e3962305752ba813a1a2f7dbc4952f39bbaf53c65 WatchSource:0}: Error finding container 3931ad46a0a49907d347388e3962305752ba813a1a2f7dbc4952f39bbaf53c65: Status 404 returned error can't find the container with id 3931ad46a0a49907d347388e3962305752ba813a1a2f7dbc4952f39bbaf53c65 Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.711821 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-54fd444d4f-vmksq" event={"ID":"cf23ea05-4538-4fed-bb3d-07d009f400bd","Type":"ContainerStarted","Data":"dbcebc91b90601b9adf418396f7cebed9c999af5b6f8c8d96b36921b2902bc42"} Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.721854 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-655h7" event={"ID":"abe4e13c-dc14-4649-896d-23ad11daedd5","Type":"ContainerStarted","Data":"21109cfc244b4def02eedf635c112bf29de41c790e5c8ccb62d24dfa568dcfb1"} Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.730304 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-fvkzh" event={"ID":"9ede43e6-1802-4389-8a47-ae78b16d3144","Type":"ContainerStarted","Data":"3931ad46a0a49907d347388e3962305752ba813a1a2f7dbc4952f39bbaf53c65"} Sep 29 13:00:42 crc kubenswrapper[4611]: I0929 13:00:42.882049 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5mdd9"] Sep 29 13:00:42 crc kubenswrapper[4611]: W0929 13:00:42.893935 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0597e5b_7c6b_465d_b298_0f72aa28e514.slice/crio-27f6667fa35ded8edb99212e3dc94db28c7b45b7f7749ee24244862b64974106 WatchSource:0}: Error finding container 27f6667fa35ded8edb99212e3dc94db28c7b45b7f7749ee24244862b64974106: Status 404 returned error can't find the container with id 27f6667fa35ded8edb99212e3dc94db28c7b45b7f7749ee24244862b64974106 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.144443 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.149072 4611 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/ceilometer-0" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-central-agent" containerID="cri-o://4e561103ba5a5c2e5e5c8ba27ecccb3e2b5570641cfb513200f2bb90a8fb2fe8" gracePeriod=30 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.149984 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="proxy-httpd" containerID="cri-o://a55dc107d64a6e4d7996b582dd0578336753c797829655495e33960dabdb0652" gracePeriod=30 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.150078 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-notification-agent" containerID="cri-o://dcf54d565f94165c1a3f1b0bcf2ac6f4610b015e68424f7a293e3e368bd52587" gracePeriod=30 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.150113 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="sg-core" containerID="cri-o://8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8" gracePeriod=30 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.177393 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.172:3000/\": EOF" Sep 29 13:00:43 crc kubenswrapper[4611]: E0929 13:00:43.485144 4611 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bb167da_c709_4fc8_bb64_1e54b8e26cdb.slice/crio-conmon-8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bb167da_c709_4fc8_bb64_1e54b8e26cdb.slice/crio-a55dc107d64a6e4d7996b582dd0578336753c797829655495e33960dabdb0652.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bb167da_c709_4fc8_bb64_1e54b8e26cdb.slice/crio-8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8.scope\": RecentStats: unable to find data in memory cache]" Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.751881 4611 generic.go:334] "Generic (PLEG): container finished" podID="9ede43e6-1802-4389-8a47-ae78b16d3144" containerID="0194ea8eacd6e3968d8010c9ecd3ecd3282d128c3576e8b3345cf6486f58d25d" exitCode=0 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.753124 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-fvkzh" event={"ID":"9ede43e6-1802-4389-8a47-ae78b16d3144","Type":"ContainerDied","Data":"0194ea8eacd6e3968d8010c9ecd3ecd3282d128c3576e8b3345cf6486f58d25d"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.756363 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5mdd9" event={"ID":"d0597e5b-7c6b-465d-b298-0f72aa28e514","Type":"ContainerStarted","Data":"27f6667fa35ded8edb99212e3dc94db28c7b45b7f7749ee24244862b64974106"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.761665 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-54fd444d4f-vmksq" 
event={"ID":"cf23ea05-4538-4fed-bb3d-07d009f400bd","Type":"ContainerStarted","Data":"281597eb8ef047d87ccc6325a18ac3ad3ee54fbbad54e607b1376e238966f096"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.761737 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-54fd444d4f-vmksq" event={"ID":"cf23ea05-4538-4fed-bb3d-07d009f400bd","Type":"ContainerStarted","Data":"39540449dcf8b2c8d39183e7ca93c351ed7152014120b35cfbdfd81c33b7c7b2"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.761843 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.761865 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.767954 4611 generic.go:334] "Generic (PLEG): container finished" podID="abe4e13c-dc14-4649-896d-23ad11daedd5" containerID="c388c83e45b33aee3b2838adb7d33d73d56bf833acff7dabbadf8a1e06805fcd" exitCode=0 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.768376 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-655h7" event={"ID":"abe4e13c-dc14-4649-896d-23ad11daedd5","Type":"ContainerDied","Data":"c388c83e45b33aee3b2838adb7d33d73d56bf833acff7dabbadf8a1e06805fcd"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.784233 4611 generic.go:334] "Generic (PLEG): container finished" podID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerID="a55dc107d64a6e4d7996b582dd0578336753c797829655495e33960dabdb0652" exitCode=0 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.784274 4611 generic.go:334] "Generic (PLEG): container finished" podID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerID="8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8" exitCode=2 Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.784304 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerDied","Data":"a55dc107d64a6e4d7996b582dd0578336753c797829655495e33960dabdb0652"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.784333 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerDied","Data":"8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8"} Sep 29 13:00:43 crc kubenswrapper[4611]: I0929 13:00:43.913673 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-54fd444d4f-vmksq" podStartSLOduration=3.9136547779999997 podStartE2EDuration="3.913654778s" podCreationTimestamp="2025-09-29 13:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:43.907589193 +0000 UTC m=+1230.799108799" watchObservedRunningTime="2025-09-29 13:00:43.913654778 +0000 UTC m=+1230.805174384" Sep 29 13:00:44 crc kubenswrapper[4611]: I0929 13:00:44.797183 4611 generic.go:334] "Generic (PLEG): container finished" podID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerID="4e561103ba5a5c2e5e5c8ba27ecccb3e2b5570641cfb513200f2bb90a8fb2fe8" exitCode=0 Sep 29 13:00:44 crc kubenswrapper[4611]: I0929 13:00:44.797310 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerDied","Data":"4e561103ba5a5c2e5e5c8ba27ecccb3e2b5570641cfb513200f2bb90a8fb2fe8"} Sep 29 13:00:47 crc kubenswrapper[4611]: I0929 13:00:47.836379 4611 generic.go:334] "Generic (PLEG): container finished" podID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerID="dcf54d565f94165c1a3f1b0bcf2ac6f4610b015e68424f7a293e3e368bd52587" exitCode=0 Sep 29 13:00:47 crc kubenswrapper[4611]: I0929 13:00:47.836428 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerDied","Data":"dcf54d565f94165c1a3f1b0bcf2ac6f4610b015e68424f7a293e3e368bd52587"} Sep 29 13:00:48 crc kubenswrapper[4611]: I0929 13:00:48.075588 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 13:00:48 crc kubenswrapper[4611]: I0929 13:00:48.075872 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-log" containerID="cri-o://8ff2a9cb06cc53485bc66968bad04c7630d353190ab6e69e2516420fd3f9e5d1" gracePeriod=30 Sep 29 13:00:48 crc kubenswrapper[4611]: I0929 13:00:48.075957 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-httpd" containerID="cri-o://088f88043c2c3d9ba57ea71f288395d013f2f40f3b12e33af2a09aed6da4c69a" gracePeriod=30 Sep 29 13:00:48 crc kubenswrapper[4611]: I0929 13:00:48.871777 4611 generic.go:334] "Generic (PLEG): container finished" podID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerID="8ff2a9cb06cc53485bc66968bad04c7630d353190ab6e69e2516420fd3f9e5d1" exitCode=143 Sep 29 13:00:48 crc kubenswrapper[4611]: I0929 13:00:48.871975 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a9339db1-8dcb-4435-94f5-ac29a7ae99a0","Type":"ContainerDied","Data":"8ff2a9cb06cc53485bc66968bad04c7630d353190ab6e69e2516420fd3f9e5d1"} Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.857045 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.862360 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-httpd" containerID="cri-o://5d71ea815ea44b121494615ef8733654a74798a7d69d693916dd0367cc2776c1" gracePeriod=30 Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.862245 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-log" containerID="cri-o://cdd5f8b20832ce876cb2582356fe5b5b757d58bd4fb563981d1946788e6c232f" gracePeriod=30 Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.890108 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.172:3000/\": dial tcp 10.217.0.172:3000: connect: connection refused" Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.917372 4611 generic.go:334] "Generic (PLEG): container finished" podID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" 
containerID="088f88043c2c3d9ba57ea71f288395d013f2f40f3b12e33af2a09aed6da4c69a" exitCode=0 Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.917419 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a9339db1-8dcb-4435-94f5-ac29a7ae99a0","Type":"ContainerDied","Data":"088f88043c2c3d9ba57ea71f288395d013f2f40f3b12e33af2a09aed6da4c69a"} Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.936763 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:50 crc kubenswrapper[4611]: I0929 13:00:50.937049 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-54fd444d4f-vmksq" Sep 29 13:00:51 crc kubenswrapper[4611]: I0929 13:00:51.509855 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.155:9292/healthcheck\": dial tcp 10.217.0.155:9292: connect: connection refused" Sep 29 13:00:51 crc kubenswrapper[4611]: I0929 13:00:51.509873 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.155:9292/healthcheck\": dial tcp 10.217.0.155:9292: connect: connection refused" Sep 29 13:00:51 crc kubenswrapper[4611]: I0929 13:00:51.934873 4611 generic.go:334] "Generic (PLEG): container finished" podID="51a74707-06e6-48d9-8636-a921a4a559e6" containerID="cdd5f8b20832ce876cb2582356fe5b5b757d58bd4fb563981d1946788e6c232f" exitCode=143 Sep 29 13:00:51 crc kubenswrapper[4611]: I0929 13:00:51.934920 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"51a74707-06e6-48d9-8636-a921a4a559e6","Type":"ContainerDied","Data":"cdd5f8b20832ce876cb2582356fe5b5b757d58bd4fb563981d1946788e6c232f"} Sep 29 13:00:52 crc kubenswrapper[4611]: I0929 13:00:52.557741 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.156:9292/healthcheck\": read tcp 10.217.0.2:41292->10.217.0.156:9292: read: connection reset by peer" Sep 29 13:00:52 crc kubenswrapper[4611]: I0929 13:00:52.557743 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.156:9292/healthcheck\": read tcp 10.217.0.2:41306->10.217.0.156:9292: read: connection reset by peer" Sep 29 13:00:52 crc kubenswrapper[4611]: I0929 13:00:52.948707 4611 generic.go:334] "Generic (PLEG): container finished" podID="51a74707-06e6-48d9-8636-a921a4a559e6" containerID="5d71ea815ea44b121494615ef8733654a74798a7d69d693916dd0367cc2776c1" exitCode=0 Sep 29 13:00:52 crc kubenswrapper[4611]: I0929 13:00:52.948766 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"51a74707-06e6-48d9-8636-a921a4a559e6","Type":"ContainerDied","Data":"5d71ea815ea44b121494615ef8733654a74798a7d69d693916dd0367cc2776c1"} Sep 29 13:00:53 crc kubenswrapper[4611]: I0929 13:00:53.470993 4611 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:53 crc kubenswrapper[4611]: I0929 13:00:53.567494 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:00:53 crc kubenswrapper[4611]: E0929 13:00:53.854732 4611 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-openstackclient:current-tested" Sep 29 13:00:53 crc kubenswrapper[4611]: E0929 13:00:53.855003 4611 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-openstackclient:current-tested" Sep 29 13:00:53 crc kubenswrapper[4611]: E0929 13:00:53.855120 4611 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.rdoproject.org/podified-master-centos10/openstack-openstackclient:current-tested,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5fdh54fh5ddh5dbhb6h665h68chb7h9dh5fbh597h64dh74h5d4hfdh55dh5f4h58bhf5h657h5ch5c7h95h55dh544h5c7h65fh89h54h7dh545h5c7q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ld5nl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL 
MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(900d519c-288a-4341-911f-e429cbddfd5b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 13:00:53 crc kubenswrapper[4611]: E0929 13:00:53.856403 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="900d519c-288a-4341-911f-e429cbddfd5b" Sep 29 13:00:53 crc kubenswrapper[4611]: I0929 13:00:53.962232 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-655h7" event={"ID":"abe4e13c-dc14-4649-896d-23ad11daedd5","Type":"ContainerDied","Data":"21109cfc244b4def02eedf635c112bf29de41c790e5c8ccb62d24dfa568dcfb1"} Sep 29 13:00:53 crc kubenswrapper[4611]: I0929 13:00:53.962269 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21109cfc244b4def02eedf635c112bf29de41c790e5c8ccb62d24dfa568dcfb1" Sep 29 13:00:53 crc kubenswrapper[4611]: I0929 13:00:53.964702 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-fvkzh" event={"ID":"9ede43e6-1802-4389-8a47-ae78b16d3144","Type":"ContainerDied","Data":"3931ad46a0a49907d347388e3962305752ba813a1a2f7dbc4952f39bbaf53c65"} Sep 29 13:00:53 crc kubenswrapper[4611]: I0929 13:00:53.964729 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3931ad46a0a49907d347388e3962305752ba813a1a2f7dbc4952f39bbaf53c65" Sep 29 13:00:53 crc kubenswrapper[4611]: E0929 13:00:53.966808 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-openstackclient:current-tested\\\"\"" pod="openstack/openstackclient" podUID="900d519c-288a-4341-911f-e429cbddfd5b" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.015668 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.027523 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.145545 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9b6p\" (UniqueName: \"kubernetes.io/projected/9ede43e6-1802-4389-8a47-ae78b16d3144-kube-api-access-k9b6p\") pod \"9ede43e6-1802-4389-8a47-ae78b16d3144\" (UID: \"9ede43e6-1802-4389-8a47-ae78b16d3144\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.145613 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf45l\" (UniqueName: \"kubernetes.io/projected/abe4e13c-dc14-4649-896d-23ad11daedd5-kube-api-access-cf45l\") pod \"abe4e13c-dc14-4649-896d-23ad11daedd5\" (UID: \"abe4e13c-dc14-4649-896d-23ad11daedd5\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.159120 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ede43e6-1802-4389-8a47-ae78b16d3144-kube-api-access-k9b6p" (OuterVolumeSpecName: "kube-api-access-k9b6p") pod "9ede43e6-1802-4389-8a47-ae78b16d3144" (UID: "9ede43e6-1802-4389-8a47-ae78b16d3144"). InnerVolumeSpecName "kube-api-access-k9b6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.188354 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abe4e13c-dc14-4649-896d-23ad11daedd5-kube-api-access-cf45l" (OuterVolumeSpecName: "kube-api-access-cf45l") pod "abe4e13c-dc14-4649-896d-23ad11daedd5" (UID: "abe4e13c-dc14-4649-896d-23ad11daedd5"). InnerVolumeSpecName "kube-api-access-cf45l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.247896 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9b6p\" (UniqueName: \"kubernetes.io/projected/9ede43e6-1802-4389-8a47-ae78b16d3144-kube-api-access-k9b6p\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.247933 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf45l\" (UniqueName: \"kubernetes.io/projected/abe4e13c-dc14-4649-896d-23ad11daedd5-kube-api-access-cf45l\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.474151 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.639419 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.660731 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-config-data\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.660780 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-log-httpd\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.660803 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-run-httpd\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.660862 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngv9t\" (UniqueName: \"kubernetes.io/projected/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-kube-api-access-ngv9t\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.660912 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-scripts\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.661052 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-combined-ca-bundle\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.661076 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-sg-core-conf-yaml\") pod \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\" (UID: \"6bb167da-c709-4fc8-bb64-1e54b8e26cdb\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.665328 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.669585 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.720311 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-kube-api-access-ngv9t" (OuterVolumeSpecName: "kube-api-access-ngv9t") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "kube-api-access-ngv9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.729712 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-scripts" (OuterVolumeSpecName: "scripts") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.769710 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-httpd-run\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.769788 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-logs\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.769848 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-config-data\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770061 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6p79d\" (UniqueName: \"kubernetes.io/projected/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-kube-api-access-6p79d\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770094 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770128 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-combined-ca-bundle\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770146 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-scripts\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770204 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-public-tls-certs\") pod \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\" (UID: \"a9339db1-8dcb-4435-94f5-ac29a7ae99a0\") " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770367 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770546 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770565 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770575 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngv9t\" (UniqueName: \"kubernetes.io/projected/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-kube-api-access-ngv9t\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770585 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.770593 4611 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.783993 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.785091 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-logs" (OuterVolumeSpecName: "logs") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.796616 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-scripts" (OuterVolumeSpecName: "scripts") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.806719 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.807207 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-kube-api-access-6p79d" (OuterVolumeSpecName: "kube-api-access-6p79d") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "kube-api-access-6p79d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.874493 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.874545 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6p79d\" (UniqueName: \"kubernetes.io/projected/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-kube-api-access-6p79d\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.874574 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.874587 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.874636 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.917520 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.924358 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.964760 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.965659 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-config-data" (OuterVolumeSpecName: "config-data") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.976767 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.976824 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.976838 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.976849 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.993739 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.993740 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a9339db1-8dcb-4435-94f5-ac29a7ae99a0","Type":"ContainerDied","Data":"a1f860fa94da5ad692353d8f5c0db4a27c58e1e8bcfac88c96a78fc3041acb7c"} Sep 29 13:00:54 crc kubenswrapper[4611]: I0929 13:00:54.993943 4611 scope.go:117] "RemoveContainer" containerID="088f88043c2c3d9ba57ea71f288395d013f2f40f3b12e33af2a09aed6da4c69a" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.001225 4611 generic.go:334] "Generic (PLEG): container finished" podID="d0597e5b-7c6b-465d-b298-0f72aa28e514" containerID="290a48b9235c62eb9ce12006d08061fbf254a3a7fa9c3886aa765f4485da33ca" exitCode=0 Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.001318 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5mdd9" event={"ID":"d0597e5b-7c6b-465d-b298-0f72aa28e514","Type":"ContainerDied","Data":"290a48b9235c62eb9ce12006d08061fbf254a3a7fa9c3886aa765f4485da33ca"} Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.007295 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"51a74707-06e6-48d9-8636-a921a4a559e6","Type":"ContainerDied","Data":"5ad54c319e9724641d037396bcd99698330f30231353e15ad276411218349c6c"} Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.007369 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ad54c319e9724641d037396bcd99698330f30231353e15ad276411218349c6c" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.036345 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-655h7" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.037758 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6bb167da-c709-4fc8-bb64-1e54b8e26cdb","Type":"ContainerDied","Data":"569c5869e74e4e3b49d5e931f06b6c3ae04fd4c1ad77ead923af8296185cfd7e"} Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.037797 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.038118 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-fvkzh" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.056874 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-config-data" (OuterVolumeSpecName: "config-data") pod "6bb167da-c709-4fc8-bb64-1e54b8e26cdb" (UID: "6bb167da-c709-4fc8-bb64-1e54b8e26cdb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.079179 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb167da-c709-4fc8-bb64-1e54b8e26cdb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.101585 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.109897 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a9339db1-8dcb-4435-94f5-ac29a7ae99a0" (UID: "a9339db1-8dcb-4435-94f5-ac29a7ae99a0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.131865 4611 scope.go:117] "RemoveContainer" containerID="8ff2a9cb06cc53485bc66968bad04c7630d353190ab6e69e2516420fd3f9e5d1" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.179891 4611 scope.go:117] "RemoveContainer" containerID="a55dc107d64a6e4d7996b582dd0578336753c797829655495e33960dabdb0652" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.181600 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-logs\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.181925 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-config-data\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.181980 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhf76\" (UniqueName: \"kubernetes.io/projected/51a74707-06e6-48d9-8636-a921a4a559e6-kube-api-access-fhf76\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.182068 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.182220 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-internal-tls-certs\") pod 
\"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.182286 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-combined-ca-bundle\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.182332 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-httpd-run\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.182407 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-scripts\") pod \"51a74707-06e6-48d9-8636-a921a4a559e6\" (UID: \"51a74707-06e6-48d9-8636-a921a4a559e6\") " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.182892 4611 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9339db1-8dcb-4435-94f5-ac29a7ae99a0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.186775 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-scripts" (OuterVolumeSpecName: "scripts") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.190938 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.192900 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-logs" (OuterVolumeSpecName: "logs") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.193323 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.198936 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51a74707-06e6-48d9-8636-a921a4a559e6-kube-api-access-fhf76" (OuterVolumeSpecName: "kube-api-access-fhf76") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "kube-api-access-fhf76". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.231525 4611 scope.go:117] "RemoveContainer" containerID="8d1aae7c35080c854d7a54ece48b8147c58c46ac2f73516251536d479dc83ce8" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.251673 4611 scope.go:117] "RemoveContainer" containerID="dcf54d565f94165c1a3f1b0bcf2ac6f4610b015e68424f7a293e3e368bd52587" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.268109 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.275472 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-config-data" (OuterVolumeSpecName: "config-data") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.277124 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "51a74707-06e6-48d9-8636-a921a4a559e6" (UID: "51a74707-06e6-48d9-8636-a921a4a559e6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.281648 4611 scope.go:117] "RemoveContainer" containerID="4e561103ba5a5c2e5e5c8ba27ecccb3e2b5570641cfb513200f2bb90a8fb2fe8" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288360 4611 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288405 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288420 4611 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288431 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288443 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51a74707-06e6-48d9-8636-a921a4a559e6-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288454 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51a74707-06e6-48d9-8636-a921a4a559e6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288470 4611 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhf76\" (UniqueName: \"kubernetes.io/projected/51a74707-06e6-48d9-8636-a921a4a559e6-kube-api-access-fhf76\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.288504 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.325933 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.348470 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.379935 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.397843 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400237 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400737 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe4e13c-dc14-4649-896d-23ad11daedd5" containerName="mariadb-database-create" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400760 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe4e13c-dc14-4649-896d-23ad11daedd5" containerName="mariadb-database-create" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400794 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-notification-agent" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400804 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-notification-agent" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400814 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400825 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400835 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ede43e6-1802-4389-8a47-ae78b16d3144" containerName="mariadb-database-create" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400843 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ede43e6-1802-4389-8a47-ae78b16d3144" containerName="mariadb-database-create" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400868 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-log" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400877 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-log" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400897 4611 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400906 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400916 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-central-agent" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400924 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-central-agent" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400940 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="proxy-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400948 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="proxy-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400966 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="sg-core" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400977 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="sg-core" Sep 29 13:00:55 crc kubenswrapper[4611]: E0929 13:00:55.400987 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-log" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.400993 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-log" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401210 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe4e13c-dc14-4649-896d-23ad11daedd5" containerName="mariadb-database-create" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401232 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401250 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" containerName="glance-log" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401261 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ede43e6-1802-4389-8a47-ae78b16d3144" containerName="mariadb-database-create" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401274 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="proxy-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401288 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-notification-agent" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401304 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-log" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401316 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" containerName="glance-httpd" Sep 29 13:00:55 crc kubenswrapper[4611]: 
I0929 13:00:55.401329 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="sg-core" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.401343 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" containerName="ceilometer-central-agent" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.406232 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.411921 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.412591 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.412881 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.425050 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.442771 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.456687 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.459466 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.470161 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.470347 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.473309 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.507855 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.507927 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c295c03-859a-41dd-acb6-1d7f13cc0877-logs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.508002 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.508035 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/8c295c03-859a-41dd-acb6-1d7f13cc0877-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.508085 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.508138 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42gzs\" (UniqueName: \"kubernetes.io/projected/8c295c03-859a-41dd-acb6-1d7f13cc0877-kube-api-access-42gzs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.508165 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-config-data\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.508224 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-scripts\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.609851 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-run-httpd\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.609942 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.609975 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42gzs\" (UniqueName: \"kubernetes.io/projected/8c295c03-859a-41dd-acb6-1d7f13cc0877-kube-api-access-42gzs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610000 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-config-data\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610045 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z54fk\" (UniqueName: 
\"kubernetes.io/projected/1cede910-4168-4e97-bb2f-d4a89cf79d8b-kube-api-access-z54fk\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610076 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-scripts\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610101 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-config-data\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610136 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-scripts\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610169 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610215 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610235 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c295c03-859a-41dd-acb6-1d7f13cc0877-logs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610261 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610299 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-log-httpd\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610335 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " 
pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610367 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c295c03-859a-41dd-acb6-1d7f13cc0877-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.610944 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c295c03-859a-41dd-acb6-1d7f13cc0877-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.611260 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.612260 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c295c03-859a-41dd-acb6-1d7f13cc0877-logs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.618750 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-config-data\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.629328 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.633296 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.634387 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c295c03-859a-41dd-acb6-1d7f13cc0877-scripts\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.638935 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42gzs\" (UniqueName: \"kubernetes.io/projected/8c295c03-859a-41dd-acb6-1d7f13cc0877-kube-api-access-42gzs\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.685749 4611 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"8c295c03-859a-41dd-acb6-1d7f13cc0877\") " pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713659 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713730 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-log-httpd\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713814 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-run-httpd\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713893 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z54fk\" (UniqueName: \"kubernetes.io/projected/1cede910-4168-4e97-bb2f-d4a89cf79d8b-kube-api-access-z54fk\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713935 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-scripts\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713953 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-config-data\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.713989 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.714906 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-run-httpd\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.716879 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-log-httpd\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.721931 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.723251 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-config-data\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.723521 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.728183 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-scripts\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.741766 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z54fk\" (UniqueName: \"kubernetes.io/projected/1cede910-4168-4e97-bb2f-d4a89cf79d8b-kube-api-access-z54fk\") pod \"ceilometer-0\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " pod="openstack/ceilometer-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.743860 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.768699 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bb167da-c709-4fc8-bb64-1e54b8e26cdb" path="/var/lib/kubelet/pods/6bb167da-c709-4fc8-bb64-1e54b8e26cdb/volumes" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.769461 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9339db1-8dcb-4435-94f5-ac29a7ae99a0" path="/var/lib/kubelet/pods/a9339db1-8dcb-4435-94f5-ac29a7ae99a0/volumes" Sep 29 13:00:55 crc kubenswrapper[4611]: I0929 13:00:55.789474 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.074283 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.147084 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.205007 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.257368 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.259209 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.262887 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.265945 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.300787 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.386085 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6139f1f7-d7fe-403b-b414-989be3576095-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.386393 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdr4t\" (UniqueName: \"kubernetes.io/projected/6139f1f7-d7fe-403b-b414-989be3576095-kube-api-access-vdr4t\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.386493 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.386596 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.386758 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.386939 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.387027 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6139f1f7-d7fe-403b-b414-989be3576095-logs\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.387176 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.476107 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500226 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500292 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6139f1f7-d7fe-403b-b414-989be3576095-logs\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500376 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500459 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6139f1f7-d7fe-403b-b414-989be3576095-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500493 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdr4t\" (UniqueName: \"kubernetes.io/projected/6139f1f7-d7fe-403b-b414-989be3576095-kube-api-access-vdr4t\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500550 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500571 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.500639 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc 
kubenswrapper[4611]: I0929 13:00:56.502026 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6139f1f7-d7fe-403b-b414-989be3576095-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.502826 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6139f1f7-d7fe-403b-b414-989be3576095-logs\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.504121 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.509467 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.512347 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.520092 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.524953 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6139f1f7-d7fe-403b-b414-989be3576095-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.527992 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.532834 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdr4t\" (UniqueName: \"kubernetes.io/projected/6139f1f7-d7fe-403b-b414-989be3576095-kube-api-access-vdr4t\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.560597 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6139f1f7-d7fe-403b-b414-989be3576095\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.588559 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.699128 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.746562 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.815548 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9cds\" (UniqueName: \"kubernetes.io/projected/d0597e5b-7c6b-465d-b298-0f72aa28e514-kube-api-access-w9cds\") pod \"d0597e5b-7c6b-465d-b298-0f72aa28e514\" (UID: \"d0597e5b-7c6b-465d-b298-0f72aa28e514\") " Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.822301 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0597e5b-7c6b-465d-b298-0f72aa28e514-kube-api-access-w9cds" (OuterVolumeSpecName: "kube-api-access-w9cds") pod "d0597e5b-7c6b-465d-b298-0f72aa28e514" (UID: "d0597e5b-7c6b-465d-b298-0f72aa28e514"). InnerVolumeSpecName "kube-api-access-w9cds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:00:56 crc kubenswrapper[4611]: I0929 13:00:56.918160 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9cds\" (UniqueName: \"kubernetes.io/projected/d0597e5b-7c6b-465d-b298-0f72aa28e514-kube-api-access-w9cds\") on node \"crc\" DevicePath \"\"" Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.096252 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-5mdd9" Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.096254 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5mdd9" event={"ID":"d0597e5b-7c6b-465d-b298-0f72aa28e514","Type":"ContainerDied","Data":"27f6667fa35ded8edb99212e3dc94db28c7b45b7f7749ee24244862b64974106"} Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.096359 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27f6667fa35ded8edb99212e3dc94db28c7b45b7f7749ee24244862b64974106" Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.098461 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerStarted","Data":"878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a"} Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.098480 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerStarted","Data":"6b92ba5758f97bd4b5e284fc7d42d97c1ab662f47ac1474229b7f36122d445dc"} Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.099431 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8c295c03-859a-41dd-acb6-1d7f13cc0877","Type":"ContainerStarted","Data":"56495a28819babe2ad4b5847a4d9c5e91d086ad79a059b8931af21f48a8ce7f8"} Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.264201 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 13:00:57 crc kubenswrapper[4611]: W0929 13:00:57.307731 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6139f1f7_d7fe_403b_b414_989be3576095.slice/crio-86b887eb250fec65d36bad6ce57b4e5517675f51abcaace75030deb0847f8d01 WatchSource:0}: Error finding container 86b887eb250fec65d36bad6ce57b4e5517675f51abcaace75030deb0847f8d01: Status 404 returned error can't find the container with id 86b887eb250fec65d36bad6ce57b4e5517675f51abcaace75030deb0847f8d01 Sep 29 13:00:57 crc kubenswrapper[4611]: I0929 13:00:57.753819 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51a74707-06e6-48d9-8636-a921a4a559e6" path="/var/lib/kubelet/pods/51a74707-06e6-48d9-8636-a921a4a559e6/volumes" Sep 29 13:00:58 crc kubenswrapper[4611]: I0929 13:00:58.124796 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerStarted","Data":"b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81"} Sep 29 13:00:58 crc kubenswrapper[4611]: I0929 13:00:58.132158 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6139f1f7-d7fe-403b-b414-989be3576095","Type":"ContainerStarted","Data":"86b887eb250fec65d36bad6ce57b4e5517675f51abcaace75030deb0847f8d01"} Sep 29 13:00:58 crc kubenswrapper[4611]: I0929 13:00:58.133445 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8c295c03-859a-41dd-acb6-1d7f13cc0877","Type":"ContainerStarted","Data":"5ca6268c9ab9bdba1254cfd04a8910da83806a7046f26258c30f2c2c54f62b92"} Sep 29 13:00:59 crc kubenswrapper[4611]: I0929 13:00:59.163280 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerStarted","Data":"f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316"} Sep 29 13:00:59 crc kubenswrapper[4611]: I0929 13:00:59.170358 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6139f1f7-d7fe-403b-b414-989be3576095","Type":"ContainerStarted","Data":"4b3db186ab85b4f1e3a1c13f055c76891be5b7e8fd46b95f6f9afe2514782db4"} Sep 29 13:00:59 crc kubenswrapper[4611]: I0929 13:00:59.174100 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8c295c03-859a-41dd-acb6-1d7f13cc0877","Type":"ContainerStarted","Data":"435371285a125271b2245778149baed468399ee5bf71354c5c58dcb7d6ff2264"} Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.137249 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.137227264 podStartE2EDuration="5.137227264s" podCreationTimestamp="2025-09-29 13:00:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:00:59.199080284 +0000 UTC m=+1246.090599890" watchObservedRunningTime="2025-09-29 13:01:00.137227264 +0000 UTC m=+1247.028746880" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.142556 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319181-j5cxx"] Sep 29 13:01:00 crc kubenswrapper[4611]: E0929 13:01:00.143074 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0597e5b-7c6b-465d-b298-0f72aa28e514" containerName="mariadb-database-create" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.143098 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0597e5b-7c6b-465d-b298-0f72aa28e514" containerName="mariadb-database-create" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.143356 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0597e5b-7c6b-465d-b298-0f72aa28e514" containerName="mariadb-database-create" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.155530 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319181-j5cxx"] Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.155671 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.196869 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-config-data\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.196922 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpbpn\" (UniqueName: \"kubernetes.io/projected/aab1b06f-3b44-4f36-91cb-833959f0c9f1-kube-api-access-vpbpn\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.197047 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-fernet-keys\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.197091 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-combined-ca-bundle\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.209002 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6139f1f7-d7fe-403b-b414-989be3576095","Type":"ContainerStarted","Data":"17782b6083149577a554433b99506f3cec673858ee4eeee3f162e58f45d1752e"} Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.241637 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.241594202 podStartE2EDuration="4.241594202s" podCreationTimestamp="2025-09-29 13:00:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:01:00.234513677 +0000 UTC m=+1247.126033303" watchObservedRunningTime="2025-09-29 13:01:00.241594202 +0000 UTC m=+1247.133113808" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.299468 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-fernet-keys\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.299907 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-combined-ca-bundle\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.300129 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-config-data\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.300153 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpbpn\" (UniqueName: \"kubernetes.io/projected/aab1b06f-3b44-4f36-91cb-833959f0c9f1-kube-api-access-vpbpn\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.306916 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-fernet-keys\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.308263 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-combined-ca-bundle\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.311742 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-config-data\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.339160 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpbpn\" (UniqueName: \"kubernetes.io/projected/aab1b06f-3b44-4f36-91cb-833959f0c9f1-kube-api-access-vpbpn\") pod \"keystone-cron-29319181-j5cxx\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:00 crc kubenswrapper[4611]: I0929 13:01:00.504741 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319181-j5cxx"
Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.101516 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319181-j5cxx"]
Sep 29 13:01:01 crc kubenswrapper[4611]: W0929 13:01:01.103115 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaab1b06f_3b44_4f36_91cb_833959f0c9f1.slice/crio-8e665f9d718d03578ab5469f9921caed3efa6795813aa078e8119877f5ab0591 WatchSource:0}: Error finding container 8e665f9d718d03578ab5469f9921caed3efa6795813aa078e8119877f5ab0591: Status 404 returned error can't find the container with id 8e665f9d718d03578ab5469f9921caed3efa6795813aa078e8119877f5ab0591
Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.228997 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319181-j5cxx" event={"ID":"aab1b06f-3b44-4f36-91cb-833959f0c9f1","Type":"ContainerStarted","Data":"8e665f9d718d03578ab5469f9921caed3efa6795813aa078e8119877f5ab0591"}
Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.234521 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerStarted","Data":"d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd"}
Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.263089 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.512064649 podStartE2EDuration="6.263071882s" podCreationTimestamp="2025-09-29 13:00:55 +0000 UTC" firstStartedPulling="2025-09-29 13:00:56.527737334 +0000 UTC m=+1243.419256940" lastFinishedPulling="2025-09-29 13:01:00.278744567 +0000 UTC m=+1247.170264173" observedRunningTime="2025-09-29 13:01:01.260975021 +0000 UTC m=+1248.152494647" watchObservedRunningTime="2025-09-29 13:01:01.263071882 +0000 UTC m=+1248.154591488"
Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.487132 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5c9489c674-t4fp8"
Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.854476 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-cd48-account-create-rspn7"]
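
The "Observed pod startup duration" entry for ceilometer-0 above carries four timestamps plus two derived durations, and they reconcile exactly: podStartSLOduration is podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling). A small Go check of that arithmetic, using the values copied from the entry; the only wrinkle is that the " m=+…" monotonic-clock suffix Go appends when printing a time.Time has to be stripped before time.Parse.

```go
package main

import (
	"fmt"
	"strings"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

// parse drops the " m=+…" monotonic-clock suffix, then parses the wall clock.
func parse(s string) time.Time {
	wall, _, _ := strings.Cut(s, " m=")
	t, err := time.Parse(layout, wall)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Values copied from the ceilometer-0 pod_startup_latency_tracker entry.
	firstPull := parse("2025-09-29 13:00:56.527737334 +0000 UTC m=+1243.419256940")
	lastPull := parse("2025-09-29 13:01:00.278744567 +0000 UTC m=+1247.170264173")
	e2e, _ := time.ParseDuration("6.263071882s") // podStartE2EDuration as logged

	pull := lastPull.Sub(firstPull) // time spent pulling images
	slo := e2e - pull               // startup duration with the pull window excluded

	fmt.Println("image pull window:  ", pull) // 3.751007233s
	fmt.Println("podStartSLOduration:", slo)  // 2.512064649s, matching the log
}
```

For the glance pods earlier in the log both pulling timestamps are the zero time 0001-01-01, so no pull window is subtracted and the SLO duration equals the E2E duration, which is exactly what those entries show.
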
Need to start a new one" pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.863878 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.884146 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-cd48-account-create-rspn7"] Sep 29 13:01:01 crc kubenswrapper[4611]: I0929 13:01:01.941203 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.033550 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brpbt\" (UniqueName: \"kubernetes.io/projected/47543a39-6232-4860-a9d9-20654c865434-kube-api-access-brpbt\") pod \"nova-api-cd48-account-create-rspn7\" (UID: \"47543a39-6232-4860-a9d9-20654c865434\") " pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.037181 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cddd-account-create-g6wtp"] Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.038654 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.042817 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.098405 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cddd-account-create-g6wtp"] Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.135839 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brpbt\" (UniqueName: \"kubernetes.io/projected/47543a39-6232-4860-a9d9-20654c865434-kube-api-access-brpbt\") pod \"nova-api-cd48-account-create-rspn7\" (UID: \"47543a39-6232-4860-a9d9-20654c865434\") " pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.163741 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4946-account-create-r9w2d"] Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.165023 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.169332 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brpbt\" (UniqueName: \"kubernetes.io/projected/47543a39-6232-4860-a9d9-20654c865434-kube-api-access-brpbt\") pod \"nova-api-cd48-account-create-rspn7\" (UID: \"47543a39-6232-4860-a9d9-20654c865434\") " pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.171533 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.174940 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4946-account-create-r9w2d"] Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.209860 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.237979 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84rkw\" (UniqueName: \"kubernetes.io/projected/9319ecc4-d8b0-495c-8335-c483325d02a3-kube-api-access-84rkw\") pod \"nova-cell0-cddd-account-create-g6wtp\" (UID: \"9319ecc4-d8b0-495c-8335-c483325d02a3\") " pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.265868 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319181-j5cxx" event={"ID":"aab1b06f-3b44-4f36-91cb-833959f0c9f1","Type":"ContainerStarted","Data":"17554ada99df0d3ce17dc5f0d69cd512241582b701d90e152522bb03a3b680ad"} Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.266396 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.290769 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319181-j5cxx" podStartSLOduration=2.29074496 podStartE2EDuration="2.29074496s" podCreationTimestamp="2025-09-29 13:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:01:02.287045383 +0000 UTC m=+1249.178564999" watchObservedRunningTime="2025-09-29 13:01:02.29074496 +0000 UTC m=+1249.182264566" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.339915 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8mjz\" (UniqueName: \"kubernetes.io/projected/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e-kube-api-access-j8mjz\") pod \"nova-cell1-4946-account-create-r9w2d\" (UID: \"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e\") " pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.340238 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84rkw\" (UniqueName: \"kubernetes.io/projected/9319ecc4-d8b0-495c-8335-c483325d02a3-kube-api-access-84rkw\") pod \"nova-cell0-cddd-account-create-g6wtp\" (UID: \"9319ecc4-d8b0-495c-8335-c483325d02a3\") " pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.415032 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84rkw\" (UniqueName: \"kubernetes.io/projected/9319ecc4-d8b0-495c-8335-c483325d02a3-kube-api-access-84rkw\") pod \"nova-cell0-cddd-account-create-g6wtp\" (UID: \"9319ecc4-d8b0-495c-8335-c483325d02a3\") " pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.444855 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8mjz\" (UniqueName: \"kubernetes.io/projected/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e-kube-api-access-j8mjz\") pod \"nova-cell1-4946-account-create-r9w2d\" (UID: \"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e\") " pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.480099 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8mjz\" (UniqueName: \"kubernetes.io/projected/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e-kube-api-access-j8mjz\") pod \"nova-cell1-4946-account-create-r9w2d\" 
(UID: \"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e\") " pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.550142 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.660269 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:02 crc kubenswrapper[4611]: I0929 13:01:02.843480 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-cd48-account-create-rspn7"] Sep 29 13:01:03 crc kubenswrapper[4611]: W0929 13:01:03.223292 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f77cbf5_b67a_4cd2_9ccf_7c29988e541e.slice/crio-2fa1eebbf8c2368348dc06871f60b43d5f7199c053f46c011ccbaecf69a8fbee WatchSource:0}: Error finding container 2fa1eebbf8c2368348dc06871f60b43d5f7199c053f46c011ccbaecf69a8fbee: Status 404 returned error can't find the container with id 2fa1eebbf8c2368348dc06871f60b43d5f7199c053f46c011ccbaecf69a8fbee Sep 29 13:01:03 crc kubenswrapper[4611]: I0929 13:01:03.223605 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4946-account-create-r9w2d"] Sep 29 13:01:03 crc kubenswrapper[4611]: I0929 13:01:03.284681 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4946-account-create-r9w2d" event={"ID":"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e","Type":"ContainerStarted","Data":"2fa1eebbf8c2368348dc06871f60b43d5f7199c053f46c011ccbaecf69a8fbee"} Sep 29 13:01:03 crc kubenswrapper[4611]: I0929 13:01:03.294256 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-cd48-account-create-rspn7" event={"ID":"47543a39-6232-4860-a9d9-20654c865434","Type":"ContainerStarted","Data":"3cc87c07cffb219c38e6beb91488255ac87f5a6458252dda7871530c5d1e0992"} Sep 29 13:01:03 crc kubenswrapper[4611]: I0929 13:01:03.294609 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-cd48-account-create-rspn7" event={"ID":"47543a39-6232-4860-a9d9-20654c865434","Type":"ContainerStarted","Data":"208238d5d29001f821d270f8cb7506d1805a930a67997c22a2be41c4493facd8"} Sep 29 13:01:03 crc kubenswrapper[4611]: I0929 13:01:03.324852 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-cd48-account-create-rspn7" podStartSLOduration=2.324829144 podStartE2EDuration="2.324829144s" podCreationTimestamp="2025-09-29 13:01:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:01:03.317142941 +0000 UTC m=+1250.208662557" watchObservedRunningTime="2025-09-29 13:01:03.324829144 +0000 UTC m=+1250.216348750" Sep 29 13:01:03 crc kubenswrapper[4611]: I0929 13:01:03.353148 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cddd-account-create-g6wtp"] Sep 29 13:01:04 crc kubenswrapper[4611]: E0929 13:01:04.041992 4611 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9319ecc4_d8b0_495c_8335_c483325d02a3.slice/crio-conmon-9221d4e9bd21fd8048c1ebbf45a6298602eb0a39036e3ab2634cbc1125e44971.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f77cbf5_b67a_4cd2_9ccf_7c29988e541e.slice/crio-71980fcb542e9851741602fcd25f0720ba0f1e8ca272a650ae941043542cb0f6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f77cbf5_b67a_4cd2_9ccf_7c29988e541e.slice/crio-conmon-71980fcb542e9851741602fcd25f0720ba0f1e8ca272a650ae941043542cb0f6.scope\": RecentStats: unable to find data in memory cache]" Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.303695 4611 generic.go:334] "Generic (PLEG): container finished" podID="9319ecc4-d8b0-495c-8335-c483325d02a3" containerID="9221d4e9bd21fd8048c1ebbf45a6298602eb0a39036e3ab2634cbc1125e44971" exitCode=0 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.303755 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cddd-account-create-g6wtp" event={"ID":"9319ecc4-d8b0-495c-8335-c483325d02a3","Type":"ContainerDied","Data":"9221d4e9bd21fd8048c1ebbf45a6298602eb0a39036e3ab2634cbc1125e44971"} Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.303780 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cddd-account-create-g6wtp" event={"ID":"9319ecc4-d8b0-495c-8335-c483325d02a3","Type":"ContainerStarted","Data":"adc93ea46a9d450e41d14ec16ff155b0acad1c436526db506c1bc6cfb2bbfdc4"} Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.306102 4611 generic.go:334] "Generic (PLEG): container finished" podID="8f77cbf5-b67a-4cd2-9ccf-7c29988e541e" containerID="71980fcb542e9851741602fcd25f0720ba0f1e8ca272a650ae941043542cb0f6" exitCode=0 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.306154 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4946-account-create-r9w2d" event={"ID":"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e","Type":"ContainerDied","Data":"71980fcb542e9851741602fcd25f0720ba0f1e8ca272a650ae941043542cb0f6"} Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.307440 4611 generic.go:334] "Generic (PLEG): container finished" podID="47543a39-6232-4860-a9d9-20654c865434" containerID="3cc87c07cffb219c38e6beb91488255ac87f5a6458252dda7871530c5d1e0992" exitCode=0 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.307472 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-cd48-account-create-rspn7" event={"ID":"47543a39-6232-4860-a9d9-20654c865434","Type":"ContainerDied","Data":"3cc87c07cffb219c38e6beb91488255ac87f5a6458252dda7871530c5d1e0992"} Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.681898 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.682501 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-central-agent" containerID="cri-o://878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a" gracePeriod=30 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.682998 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="proxy-httpd" containerID="cri-o://d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd" gracePeriod=30 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.683114 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="sg-core" containerID="cri-o://f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316" gracePeriod=30 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.683154 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-notification-agent" containerID="cri-o://b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81" gracePeriod=30 Sep 29 13:01:04 crc kubenswrapper[4611]: I0929 13:01:04.796480 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5c9489c674-t4fp8" Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.228500 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.302395 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c9489c674-t4fp8"] Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.322511 4611 generic.go:334] "Generic (PLEG): container finished" podID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerID="d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd" exitCode=0 Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.322544 4611 generic.go:334] "Generic (PLEG): container finished" podID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerID="f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316" exitCode=2 Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.322700 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerDied","Data":"d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd"} Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.322731 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerDied","Data":"f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316"} Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.323034 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon-log" containerID="cri-o://02e3749cae440178d08377420327e1a0040c2355a5bcef666f74ba88526fabe1" gracePeriod=30 Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.324050 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" containerID="cri-o://c6aa15f2e771bb555ed0a0d2d6eb265e41f78d80a168a4a2e6d43155dae5b0ea" gracePeriod=30 Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.752743 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.753096 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.832869 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.841922 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-external-api-0" Sep 29 13:01:05 crc kubenswrapper[4611]: I0929 13:01:05.971612 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.031763 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8mjz\" (UniqueName: \"kubernetes.io/projected/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e-kube-api-access-j8mjz\") pod \"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e\" (UID: \"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e\") " Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.051523 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e-kube-api-access-j8mjz" (OuterVolumeSpecName: "kube-api-access-j8mjz") pod "8f77cbf5-b67a-4cd2-9ccf-7c29988e541e" (UID: "8f77cbf5-b67a-4cd2-9ccf-7c29988e541e"). InnerVolumeSpecName "kube-api-access-j8mjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.136455 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8mjz\" (UniqueName: \"kubernetes.io/projected/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e-kube-api-access-j8mjz\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.180981 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.190057 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.240505 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84rkw\" (UniqueName: \"kubernetes.io/projected/9319ecc4-d8b0-495c-8335-c483325d02a3-kube-api-access-84rkw\") pod \"9319ecc4-d8b0-495c-8335-c483325d02a3\" (UID: \"9319ecc4-d8b0-495c-8335-c483325d02a3\") " Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.244336 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brpbt\" (UniqueName: \"kubernetes.io/projected/47543a39-6232-4860-a9d9-20654c865434-kube-api-access-brpbt\") pod \"47543a39-6232-4860-a9d9-20654c865434\" (UID: \"47543a39-6232-4860-a9d9-20654c865434\") " Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.262911 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9319ecc4-d8b0-495c-8335-c483325d02a3-kube-api-access-84rkw" (OuterVolumeSpecName: "kube-api-access-84rkw") pod "9319ecc4-d8b0-495c-8335-c483325d02a3" (UID: "9319ecc4-d8b0-495c-8335-c483325d02a3"). InnerVolumeSpecName "kube-api-access-84rkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.263260 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47543a39-6232-4860-a9d9-20654c865434-kube-api-access-brpbt" (OuterVolumeSpecName: "kube-api-access-brpbt") pod "47543a39-6232-4860-a9d9-20654c865434" (UID: "47543a39-6232-4860-a9d9-20654c865434"). InnerVolumeSpecName "kube-api-access-brpbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.333525 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4946-account-create-r9w2d" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.334552 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4946-account-create-r9w2d" event={"ID":"8f77cbf5-b67a-4cd2-9ccf-7c29988e541e","Type":"ContainerDied","Data":"2fa1eebbf8c2368348dc06871f60b43d5f7199c053f46c011ccbaecf69a8fbee"} Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.334595 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fa1eebbf8c2368348dc06871f60b43d5f7199c053f46c011ccbaecf69a8fbee" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.336020 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-cd48-account-create-rspn7" event={"ID":"47543a39-6232-4860-a9d9-20654c865434","Type":"ContainerDied","Data":"208238d5d29001f821d270f8cb7506d1805a930a67997c22a2be41c4493facd8"} Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.336045 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="208238d5d29001f821d270f8cb7506d1805a930a67997c22a2be41c4493facd8" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.336094 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-cd48-account-create-rspn7" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.341666 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cddd-account-create-g6wtp" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.341669 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cddd-account-create-g6wtp" event={"ID":"9319ecc4-d8b0-495c-8335-c483325d02a3","Type":"ContainerDied","Data":"adc93ea46a9d450e41d14ec16ff155b0acad1c436526db506c1bc6cfb2bbfdc4"} Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.341706 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adc93ea46a9d450e41d14ec16ff155b0acad1c436526db506c1bc6cfb2bbfdc4" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.344222 4611 generic.go:334] "Generic (PLEG): container finished" podID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerID="b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81" exitCode=0 Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.345673 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerDied","Data":"b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81"} Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.345717 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.345853 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.346927 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84rkw\" (UniqueName: \"kubernetes.io/projected/9319ecc4-d8b0-495c-8335-c483325d02a3-kube-api-access-84rkw\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.346975 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brpbt\" (UniqueName: \"kubernetes.io/projected/47543a39-6232-4860-a9d9-20654c865434-kube-api-access-brpbt\") on node \"crc\" DevicePath 
\"\"" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.590184 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.591558 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.627659 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:06 crc kubenswrapper[4611]: I0929 13:01:06.636767 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.260506 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.358659 4611 generic.go:334] "Generic (PLEG): container finished" podID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerID="c6aa15f2e771bb555ed0a0d2d6eb265e41f78d80a168a4a2e6d43155dae5b0ea" exitCode=0 Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.358748 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerDied","Data":"c6aa15f2e771bb555ed0a0d2d6eb265e41f78d80a168a4a2e6d43155dae5b0ea"} Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.358823 4611 scope.go:117] "RemoveContainer" containerID="d6fee3b5d5de62a3b7b8286065e555d624605989eba216898d5cc83f4d5aa788" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.363840 4611 generic.go:334] "Generic (PLEG): container finished" podID="aab1b06f-3b44-4f36-91cb-833959f0c9f1" containerID="17554ada99df0d3ce17dc5f0d69cd512241582b701d90e152522bb03a3b680ad" exitCode=0 Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.363916 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319181-j5cxx" event={"ID":"aab1b06f-3b44-4f36-91cb-833959f0c9f1","Type":"ContainerDied","Data":"17554ada99df0d3ce17dc5f0d69cd512241582b701d90e152522bb03a3b680ad"} Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370497 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-sg-core-conf-yaml\") pod \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370581 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z54fk\" (UniqueName: \"kubernetes.io/projected/1cede910-4168-4e97-bb2f-d4a89cf79d8b-kube-api-access-z54fk\") pod \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370612 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-scripts\") pod \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370683 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-log-httpd\") pod 
\"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370715 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-config-data\") pod \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370761 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-combined-ca-bundle\") pod \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.370873 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-run-httpd\") pod \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\" (UID: \"1cede910-4168-4e97-bb2f-d4a89cf79d8b\") " Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.371888 4611 generic.go:334] "Generic (PLEG): container finished" podID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerID="878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a" exitCode=0 Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.371960 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.372017 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerDied","Data":"878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a"} Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.372048 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1cede910-4168-4e97-bb2f-d4a89cf79d8b","Type":"ContainerDied","Data":"6b92ba5758f97bd4b5e284fc7d42d97c1ab662f47ac1474229b7f36122d445dc"} Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.372075 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.373017 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.373215 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.373308 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.392919 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-scripts" (OuterVolumeSpecName: "scripts") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.412212 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cede910-4168-4e97-bb2f-d4a89cf79d8b-kube-api-access-z54fk" (OuterVolumeSpecName: "kube-api-access-z54fk") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "kube-api-access-z54fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.420886 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.475055 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.475093 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z54fk\" (UniqueName: \"kubernetes.io/projected/1cede910-4168-4e97-bb2f-d4a89cf79d8b-kube-api-access-z54fk\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.475106 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.475116 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.475124 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1cede910-4168-4e97-bb2f-d4a89cf79d8b-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.566465 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-config-data" (OuterVolumeSpecName: "config-data") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.568790 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1cede910-4168-4e97-bb2f-d4a89cf79d8b" (UID: "1cede910-4168-4e97-bb2f-d4a89cf79d8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.577229 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.577267 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cede910-4168-4e97-bb2f-d4a89cf79d8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.613934 4611 scope.go:117] "RemoveContainer" containerID="d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.654144 4611 scope.go:117] "RemoveContainer" containerID="f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.682042 4611 scope.go:117] "RemoveContainer" containerID="b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.742362 4611 scope.go:117] "RemoveContainer" containerID="878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.792278 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.792402 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.797866 4611 scope.go:117] "RemoveContainer" containerID="d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.798951 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd\": container with ID starting with d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd not found: ID does not exist" containerID="d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.799008 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd"} err="failed to get container status \"d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd\": rpc error: code = NotFound desc = could not find container \"d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd\": container with ID starting with d41642c7d6870533ca97d7d0cce3a5ffa3c65ec4ea0f57991ae2b05097bfa9cd not found: ID does not exist" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.799035 4611 scope.go:117] "RemoveContainer" containerID="f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.799477 4611 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316\": container with ID starting with f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316 not found: ID does not exist" containerID="f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.799593 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316"} err="failed to get container status \"f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316\": rpc error: code = NotFound desc = could not find container \"f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316\": container with ID starting with f2361e89cc221e4daf0c310991ecea4845fc48428000dd7e5e83611800c22316 not found: ID does not exist" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.799758 4611 scope.go:117] "RemoveContainer" containerID="b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.800060 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81\": container with ID starting with b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81 not found: ID does not exist" containerID="b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.800098 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81"} err="failed to get container status \"b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81\": rpc error: code = NotFound desc = could not find container \"b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81\": container with ID starting with b2d38a05b5eb9e5ac117812e06fd9d7aeac610a5cb8f786909f1646d4ea85d81 not found: ID does not exist" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.806190 4611 scope.go:117] "RemoveContainer" containerID="878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.811700 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a\": container with ID starting with 878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a not found: ID does not exist" containerID="878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.811762 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a"} err="failed to get container status \"878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a\": rpc error: code = NotFound desc = could not find container \"878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a\": container with ID starting with 878125fe58a496545731f3c7041e1c460782074be0e94aabeaec8c7a11ac561a not found: ID does not exist" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.856489 4611 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.856984 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="proxy-httpd" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857003 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="proxy-httpd" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.857014 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-notification-agent" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857021 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-notification-agent" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.857048 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f77cbf5-b67a-4cd2-9ccf-7c29988e541e" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857055 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f77cbf5-b67a-4cd2-9ccf-7c29988e541e" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.857077 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47543a39-6232-4860-a9d9-20654c865434" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857089 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="47543a39-6232-4860-a9d9-20654c865434" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.857106 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="sg-core" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857112 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="sg-core" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.857126 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-central-agent" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857132 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-central-agent" Sep 29 13:01:07 crc kubenswrapper[4611]: E0929 13:01:07.857155 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9319ecc4-d8b0-495c-8335-c483325d02a3" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857162 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="9319ecc4-d8b0-495c-8335-c483325d02a3" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857400 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="proxy-httpd" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857432 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-central-agent" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857444 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="9319ecc4-d8b0-495c-8335-c483325d02a3" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: 
I0929 13:01:07.857458 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="ceilometer-notification-agent" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857470 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="47543a39-6232-4860-a9d9-20654c865434" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857481 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" containerName="sg-core" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.857488 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f77cbf5-b67a-4cd2-9ccf-7c29988e541e" containerName="mariadb-account-create" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.859798 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.866768 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.867833 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.883045 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987168 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987236 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987275 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-log-httpd\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987316 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-run-httpd\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987346 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-scripts\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987405 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8dp2\" (UniqueName: 
\"kubernetes.io/projected/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-kube-api-access-g8dp2\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:07 crc kubenswrapper[4611]: I0929 13:01:07.987425 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-config-data\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088648 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088722 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088760 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-log-httpd\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088799 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-run-httpd\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088828 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-scripts\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088889 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8dp2\" (UniqueName: \"kubernetes.io/projected/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-kube-api-access-g8dp2\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.088912 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-config-data\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.090222 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-log-httpd\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.090411 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-run-httpd\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.093331 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-scripts\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.099546 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.100254 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-config-data\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.113837 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.148240 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8dp2\" (UniqueName: \"kubernetes.io/projected/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-kube-api-access-g8dp2\") pod \"ceilometer-0\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.204504 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.400518 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.400791 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.475730 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.796500 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:08 crc kubenswrapper[4611]: W0929 13:01:08.808781 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40a310c6_8f8d_4a0f_8fd6_d524af43ab76.slice/crio-80b06e12e6f47095ea272244593d3307d2ccfa966453d1d92f348e565ed6fb9c WatchSource:0}: Error finding container 80b06e12e6f47095ea272244593d3307d2ccfa966453d1d92f348e565ed6fb9c: Status 404 returned error can't find the container with id 80b06e12e6f47095ea272244593d3307d2ccfa966453d1d92f348e565ed6fb9c Sep 29 13:01:08 crc kubenswrapper[4611]: I0929 13:01:08.952212 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.013728 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-combined-ca-bundle\") pod \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.013873 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-fernet-keys\") pod \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.013986 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpbpn\" (UniqueName: \"kubernetes.io/projected/aab1b06f-3b44-4f36-91cb-833959f0c9f1-kube-api-access-vpbpn\") pod \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.014017 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-config-data\") pod \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\" (UID: \"aab1b06f-3b44-4f36-91cb-833959f0c9f1\") " Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.020784 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aab1b06f-3b44-4f36-91cb-833959f0c9f1" (UID: "aab1b06f-3b44-4f36-91cb-833959f0c9f1"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.021353 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aab1b06f-3b44-4f36-91cb-833959f0c9f1-kube-api-access-vpbpn" (OuterVolumeSpecName: "kube-api-access-vpbpn") pod "aab1b06f-3b44-4f36-91cb-833959f0c9f1" (UID: "aab1b06f-3b44-4f36-91cb-833959f0c9f1"). InnerVolumeSpecName "kube-api-access-vpbpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.115935 4611 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.115968 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpbpn\" (UniqueName: \"kubernetes.io/projected/aab1b06f-3b44-4f36-91cb-833959f0c9f1-kube-api-access-vpbpn\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.133753 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aab1b06f-3b44-4f36-91cb-833959f0c9f1" (UID: "aab1b06f-3b44-4f36-91cb-833959f0c9f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.195763 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-config-data" (OuterVolumeSpecName: "config-data") pod "aab1b06f-3b44-4f36-91cb-833959f0c9f1" (UID: "aab1b06f-3b44-4f36-91cb-833959f0c9f1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.217883 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.217925 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1b06f-3b44-4f36-91cb-833959f0c9f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.416648 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerStarted","Data":"a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151"} Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.416689 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerStarted","Data":"80b06e12e6f47095ea272244593d3307d2ccfa966453d1d92f348e565ed6fb9c"} Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.424797 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319181-j5cxx" event={"ID":"aab1b06f-3b44-4f36-91cb-833959f0c9f1","Type":"ContainerDied","Data":"8e665f9d718d03578ab5469f9921caed3efa6795813aa078e8119877f5ab0591"} Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.424834 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e665f9d718d03578ab5469f9921caed3efa6795813aa078e8119877f5ab0591" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.424872 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319181-j5cxx" Sep 29 13:01:09 crc kubenswrapper[4611]: I0929 13:01:09.762739 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cede910-4168-4e97-bb2f-d4a89cf79d8b" path="/var/lib/kubelet/pods/1cede910-4168-4e97-bb2f-d4a89cf79d8b/volumes" Sep 29 13:01:10 crc kubenswrapper[4611]: I0929 13:01:10.435876 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"900d519c-288a-4341-911f-e429cbddfd5b","Type":"ContainerStarted","Data":"b523b1cefb0bc86e82651cb2485164ffd84786629efb63de528fb53a0d93b6ae"} Sep 29 13:01:10 crc kubenswrapper[4611]: I0929 13:01:10.439036 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerStarted","Data":"38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24"} Sep 29 13:01:10 crc kubenswrapper[4611]: I0929 13:01:10.439085 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerStarted","Data":"612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775"} Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.511118 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.511746 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.546143 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.420433037 podStartE2EDuration="40.546119649s" podCreationTimestamp="2025-09-29 13:00:31 +0000 UTC" firstStartedPulling="2025-09-29 13:00:32.806078883 +0000 UTC m=+1219.697598489" lastFinishedPulling="2025-09-29 13:01:09.931765495 +0000 UTC m=+1256.823285101" observedRunningTime="2025-09-29 13:01:10.457303322 +0000 UTC m=+1257.348822918" watchObservedRunningTime="2025-09-29 13:01:11.546119649 +0000 UTC m=+1258.437639255" Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.622439 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.623259 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.631415 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 13:01:11 crc kubenswrapper[4611]: I0929 13:01:11.799644 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.351017 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-njndr"] Sep 29 13:01:12 crc kubenswrapper[4611]: E0929 13:01:12.351699 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab1b06f-3b44-4f36-91cb-833959f0c9f1" containerName="keystone-cron" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.351715 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab1b06f-3b44-4f36-91cb-833959f0c9f1" containerName="keystone-cron" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.351932 4611 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="aab1b06f-3b44-4f36-91cb-833959f0c9f1" containerName="keystone-cron" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.352550 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.361335 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.362063 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7l44j" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.365783 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-njndr"] Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.371928 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.481698 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerStarted","Data":"3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35"} Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.507124 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8c88\" (UniqueName: \"kubernetes.io/projected/15aa0847-f436-4999-9622-92c588953523-kube-api-access-z8c88\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.507428 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-scripts\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.507615 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.507748 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-config-data\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.512329 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.292737936 podStartE2EDuration="5.51230959s" podCreationTimestamp="2025-09-29 13:01:07 +0000 UTC" firstStartedPulling="2025-09-29 13:01:08.813768325 +0000 UTC m=+1255.705287931" lastFinishedPulling="2025-09-29 13:01:12.033339979 +0000 UTC m=+1258.924859585" observedRunningTime="2025-09-29 13:01:12.503958329 +0000 UTC m=+1259.395477935" watchObservedRunningTime="2025-09-29 13:01:12.51230959 +0000 UTC m=+1259.403829196" Sep 29 13:01:12 
crc kubenswrapper[4611]: I0929 13:01:12.609928 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8c88\" (UniqueName: \"kubernetes.io/projected/15aa0847-f436-4999-9622-92c588953523-kube-api-access-z8c88\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.610040 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-scripts\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.610204 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.610246 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-config-data\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.615617 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-config-data\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.618395 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.624165 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-scripts\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.632958 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.652563 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8c88\" (UniqueName: \"kubernetes.io/projected/15aa0847-f436-4999-9622-92c588953523-kube-api-access-z8c88\") pod \"nova-cell0-conductor-db-sync-njndr\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") " pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:12 crc kubenswrapper[4611]: I0929 13:01:12.670753 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-njndr" Sep 29 13:01:13 crc kubenswrapper[4611]: I0929 13:01:13.169430 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-njndr"] Sep 29 13:01:13 crc kubenswrapper[4611]: I0929 13:01:13.499807 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-njndr" event={"ID":"15aa0847-f436-4999-9622-92c588953523","Type":"ContainerStarted","Data":"f6a4baa3a88bc50fbf51c2ac835aee91b496d4c013ffd49d38ef6b765f6f5256"} Sep 29 13:01:13 crc kubenswrapper[4611]: I0929 13:01:13.500581 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 13:01:14 crc kubenswrapper[4611]: I0929 13:01:14.528247 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-central-agent" containerID="cri-o://a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151" gracePeriod=30 Sep 29 13:01:14 crc kubenswrapper[4611]: I0929 13:01:14.528719 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="proxy-httpd" containerID="cri-o://3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35" gracePeriod=30 Sep 29 13:01:14 crc kubenswrapper[4611]: I0929 13:01:14.528741 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="sg-core" containerID="cri-o://38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24" gracePeriod=30 Sep 29 13:01:14 crc kubenswrapper[4611]: I0929 13:01:14.528796 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-notification-agent" containerID="cri-o://612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775" gracePeriod=30 Sep 29 13:01:15 crc kubenswrapper[4611]: I0929 13:01:15.543599 4611 generic.go:334] "Generic (PLEG): container finished" podID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerID="3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35" exitCode=0 Sep 29 13:01:15 crc kubenswrapper[4611]: I0929 13:01:15.545015 4611 generic.go:334] "Generic (PLEG): container finished" podID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerID="38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24" exitCode=2 Sep 29 13:01:15 crc kubenswrapper[4611]: I0929 13:01:15.545042 4611 generic.go:334] "Generic (PLEG): container finished" podID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerID="612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775" exitCode=0 Sep 29 13:01:15 crc kubenswrapper[4611]: I0929 13:01:15.543743 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerDied","Data":"3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35"} Sep 29 13:01:15 crc kubenswrapper[4611]: I0929 13:01:15.545083 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerDied","Data":"38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24"} Sep 29 13:01:15 crc kubenswrapper[4611]: I0929 13:01:15.545099 4611 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerDied","Data":"612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775"} Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.134148 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205283 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8dp2\" (UniqueName: \"kubernetes.io/projected/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-kube-api-access-g8dp2\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205485 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-sg-core-conf-yaml\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205652 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-config-data\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205732 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-combined-ca-bundle\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205881 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-scripts\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205908 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-log-httpd\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.205942 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-run-httpd\") pod \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\" (UID: \"40a310c6-8f8d-4a0f-8fd6-d524af43ab76\") " Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.207712 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.208126 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.214522 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-kube-api-access-g8dp2" (OuterVolumeSpecName: "kube-api-access-g8dp2") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "kube-api-access-g8dp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.223047 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-scripts" (OuterVolumeSpecName: "scripts") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.311857 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.312169 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.312275 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.312368 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8dp2\" (UniqueName: \"kubernetes.io/projected/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-kube-api-access-g8dp2\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.347726 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.413765 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.427271 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-config-data" (OuterVolumeSpecName: "config-data") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.428336 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40a310c6-8f8d-4a0f-8fd6-d524af43ab76" (UID: "40a310c6-8f8d-4a0f-8fd6-d524af43ab76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.515522 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.515559 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40a310c6-8f8d-4a0f-8fd6-d524af43ab76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.573647 4611 generic.go:334] "Generic (PLEG): container finished" podID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerID="a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151" exitCode=0 Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.573687 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerDied","Data":"a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151"} Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.573739 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40a310c6-8f8d-4a0f-8fd6-d524af43ab76","Type":"ContainerDied","Data":"80b06e12e6f47095ea272244593d3307d2ccfa966453d1d92f348e565ed6fb9c"} Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.573760 4611 scope.go:117] "RemoveContainer" containerID="3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.573912 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.609803 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.620308 4611 scope.go:117] "RemoveContainer" containerID="38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.621399 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.641677 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.642079 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-central-agent" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642097 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-central-agent" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.642112 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="proxy-httpd" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642118 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="proxy-httpd" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.642137 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="sg-core" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642143 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="sg-core" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.642167 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-notification-agent" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642173 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-notification-agent" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642333 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-central-agent" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642350 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="ceilometer-notification-agent" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642370 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="sg-core" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.642380 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" containerName="proxy-httpd" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.644306 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.651467 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.651777 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.674463 4611 scope.go:117] "RemoveContainer" containerID="612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.675523 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720059 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-run-httpd\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720155 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720194 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-scripts\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720211 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-config-data\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720601 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720732 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-log-httpd\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.720786 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmrct\" (UniqueName: \"kubernetes.io/projected/21ab812e-ceb0-4fe6-a585-06fea29d6b71-kube-api-access-wmrct\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.754840 4611 scope.go:117] "RemoveContainer" containerID="a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 
13:01:17.754856 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40a310c6-8f8d-4a0f-8fd6-d524af43ab76" path="/var/lib/kubelet/pods/40a310c6-8f8d-4a0f-8fd6-d524af43ab76/volumes" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.774432 4611 scope.go:117] "RemoveContainer" containerID="3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.774975 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35\": container with ID starting with 3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35 not found: ID does not exist" containerID="3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.775040 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35"} err="failed to get container status \"3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35\": rpc error: code = NotFound desc = could not find container \"3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35\": container with ID starting with 3839c7da5760091a749628fdba6870146951406c5356f219b2a86c7292cb4f35 not found: ID does not exist" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.775078 4611 scope.go:117] "RemoveContainer" containerID="38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.775589 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24\": container with ID starting with 38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24 not found: ID does not exist" containerID="38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.775674 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24"} err="failed to get container status \"38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24\": rpc error: code = NotFound desc = could not find container \"38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24\": container with ID starting with 38fb0607d834e70ced9e3bef29e94459515196674d401934457dc5d8e010ec24 not found: ID does not exist" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.775707 4611 scope.go:117] "RemoveContainer" containerID="612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.776129 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775\": container with ID starting with 612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775 not found: ID does not exist" containerID="612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.776159 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775"} 
err="failed to get container status \"612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775\": rpc error: code = NotFound desc = could not find container \"612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775\": container with ID starting with 612c5ce84dd28b2c7f1eaf66e198a5e9b6a2c4a6949299b39f5f527d709bb775 not found: ID does not exist" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.776181 4611 scope.go:117] "RemoveContainer" containerID="a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151" Sep 29 13:01:17 crc kubenswrapper[4611]: E0929 13:01:17.776440 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151\": container with ID starting with a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151 not found: ID does not exist" containerID="a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.776473 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151"} err="failed to get container status \"a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151\": rpc error: code = NotFound desc = could not find container \"a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151\": container with ID starting with a63308147b836bf5990ad98251609daf61c8fc69a8b504a0534445fa0df48151 not found: ID does not exist" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823117 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823186 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-scripts\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823211 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-config-data\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823297 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823329 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-log-httpd\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823366 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmrct\" (UniqueName: 
\"kubernetes.io/projected/21ab812e-ceb0-4fe6-a585-06fea29d6b71-kube-api-access-wmrct\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.823439 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-run-httpd\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.825549 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-run-httpd\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.825852 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-log-httpd\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.829820 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.830837 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-config-data\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.831167 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.831934 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-scripts\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:17 crc kubenswrapper[4611]: I0929 13:01:17.849524 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmrct\" (UniqueName: \"kubernetes.io/projected/21ab812e-ceb0-4fe6-a585-06fea29d6b71-kube-api-access-wmrct\") pod \"ceilometer-0\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") " pod="openstack/ceilometer-0" Sep 29 13:01:18 crc kubenswrapper[4611]: I0929 13:01:18.057330 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 13:01:18 crc kubenswrapper[4611]: I0929 13:01:18.467069 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused"
Sep 29 13:01:18 crc kubenswrapper[4611]: I0929 13:01:18.567194 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:18 crc kubenswrapper[4611]: W0929 13:01:18.586929 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21ab812e_ceb0_4fe6_a585_06fea29d6b71.slice/crio-4bf3bb9a7842b09baa1b8e4eab827cdb15fe84462e7bbf32529a77d788d561a2 WatchSource:0}: Error finding container 4bf3bb9a7842b09baa1b8e4eab827cdb15fe84462e7bbf32529a77d788d561a2: Status 404 returned error can't find the container with id 4bf3bb9a7842b09baa1b8e4eab827cdb15fe84462e7bbf32529a77d788d561a2
Sep 29 13:01:19 crc kubenswrapper[4611]: I0929 13:01:19.604724 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerStarted","Data":"f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38"}
Sep 29 13:01:19 crc kubenswrapper[4611]: I0929 13:01:19.605257 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerStarted","Data":"af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e"}
Sep 29 13:01:19 crc kubenswrapper[4611]: I0929 13:01:19.605269 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerStarted","Data":"4bf3bb9a7842b09baa1b8e4eab827cdb15fe84462e7bbf32529a77d788d561a2"}
Sep 29 13:01:20 crc kubenswrapper[4611]: I0929 13:01:20.842838 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:28 crc kubenswrapper[4611]: I0929 13:01:28.466990 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5c9489c674-t4fp8" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused"
Sep 29 13:01:28 crc kubenswrapper[4611]: I0929 13:01:28.467673 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5c9489c674-t4fp8"
Sep 29 13:01:28 crc kubenswrapper[4611]: I0929 13:01:28.716242 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-njndr" event={"ID":"15aa0847-f436-4999-9622-92c588953523","Type":"ContainerStarted","Data":"028d5c67557afc2c896e97de914d5a106bec8c6d0751b728381a5a09c751105e"}
Sep 29 13:01:28 crc kubenswrapper[4611]: I0929 13:01:28.720073 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerStarted","Data":"879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b"}
Sep 29 13:01:28 crc kubenswrapper[4611]: I0929 13:01:28.742339 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-njndr" podStartSLOduration=1.721118727 podStartE2EDuration="16.742314302s" podCreationTimestamp="2025-09-29 13:01:12 +0000 UTC" firstStartedPulling="2025-09-29 13:01:13.171647107 +0000 UTC m=+1260.063166713" lastFinishedPulling="2025-09-29 13:01:28.192842672 +0000 UTC m=+1275.084362288" observedRunningTime="2025-09-29 13:01:28.738592615 +0000 UTC m=+1275.630112241" watchObservedRunningTime="2025-09-29 13:01:28.742314302 +0000 UTC m=+1275.633833918"
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.743539 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerStarted","Data":"ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5"}
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.744162 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.743815 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="proxy-httpd" containerID="cri-o://ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5" gracePeriod=30
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.743729 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-central-agent" containerID="cri-o://af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e" gracePeriod=30
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.743884 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="sg-core" containerID="cri-o://879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b" gracePeriod=30
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.743827 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-notification-agent" containerID="cri-o://f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38" gracePeriod=30
Sep 29 13:01:30 crc kubenswrapper[4611]: I0929 13:01:30.775165 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.360323213 podStartE2EDuration="13.775140639s" podCreationTimestamp="2025-09-29 13:01:17 +0000 UTC" firstStartedPulling="2025-09-29 13:01:18.59188592 +0000 UTC m=+1265.483405526" lastFinishedPulling="2025-09-29 13:01:30.006703346 +0000 UTC m=+1276.898222952" observedRunningTime="2025-09-29 13:01:30.769516346 +0000 UTC m=+1277.661035962" watchObservedRunningTime="2025-09-29 13:01:30.775140639 +0000 UTC m=+1277.666660245"
Sep 29 13:01:31 crc kubenswrapper[4611]: I0929 13:01:31.757633 4611 generic.go:334] "Generic (PLEG): container finished" podID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerID="ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5" exitCode=0
Sep 29 13:01:31 crc kubenswrapper[4611]: I0929 13:01:31.757944 4611 generic.go:334] "Generic (PLEG): container finished" podID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerID="879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b" exitCode=2
Sep 29 13:01:31 crc kubenswrapper[4611]: I0929 13:01:31.757956 4611 generic.go:334] "Generic (PLEG): container finished" podID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerID="af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e" exitCode=0
Sep 29 13:01:31 crc kubenswrapper[4611]: I0929 13:01:31.757662 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerDied","Data":"ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5"}
Sep 29 13:01:31 crc kubenswrapper[4611]: I0929 13:01:31.757990 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerDied","Data":"879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b"}
Sep 29 13:01:31 crc kubenswrapper[4611]: I0929 13:01:31.758005 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerDied","Data":"af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e"}
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.368454 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.430806 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-run-httpd\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.430879 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-scripts\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.430989 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-combined-ca-bundle\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.431033 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmrct\" (UniqueName: \"kubernetes.io/projected/21ab812e-ceb0-4fe6-a585-06fea29d6b71-kube-api-access-wmrct\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.431077 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-log-httpd\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.431195 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-sg-core-conf-yaml\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.431283 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-config-data\") pod \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\" (UID: \"21ab812e-ceb0-4fe6-a585-06fea29d6b71\") "
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.431888 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.438268 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21ab812e-ceb0-4fe6-a585-06fea29d6b71-kube-api-access-wmrct" (OuterVolumeSpecName: "kube-api-access-wmrct") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "kube-api-access-wmrct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.438314 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-scripts" (OuterVolumeSpecName: "scripts") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.438364 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.480259 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.533441 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.533481 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.533493 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmrct\" (UniqueName: \"kubernetes.io/projected/21ab812e-ceb0-4fe6-a585-06fea29d6b71-kube-api-access-wmrct\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.533507 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/21ab812e-ceb0-4fe6-a585-06fea29d6b71-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.533518 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.534656 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.586471 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-config-data" (OuterVolumeSpecName: "config-data") pod "21ab812e-ceb0-4fe6-a585-06fea29d6b71" (UID: "21ab812e-ceb0-4fe6-a585-06fea29d6b71"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.634845 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.634887 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21ab812e-ceb0-4fe6-a585-06fea29d6b71-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.770205 4611 generic.go:334] "Generic (PLEG): container finished" podID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerID="f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38" exitCode=0
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.770217 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerDied","Data":"f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38"}
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.770300 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"21ab812e-ceb0-4fe6-a585-06fea29d6b71","Type":"ContainerDied","Data":"4bf3bb9a7842b09baa1b8e4eab827cdb15fe84462e7bbf32529a77d788d561a2"}
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.770325 4611 scope.go:117] "RemoveContainer" containerID="ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.770250 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.797199 4611 scope.go:117] "RemoveContainer" containerID="879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.816406 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.827359 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.838824 4611 scope.go:117] "RemoveContainer" containerID="f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.871063 4611 scope.go:117] "RemoveContainer" containerID="af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.872022 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.872562 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="sg-core"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.872580 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="sg-core"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.872605 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-notification-agent"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.872613 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-notification-agent"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.872677 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-central-agent"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.872688 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-central-agent"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.872702 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="proxy-httpd"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.872710 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="proxy-httpd"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.872986 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-notification-agent"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.873014 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="proxy-httpd"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.873043 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="sg-core"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.873054 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" containerName="ceilometer-central-agent"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.875039 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.882240 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.882757 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.896817 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.916140 4611 scope.go:117] "RemoveContainer" containerID="ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.917969 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5\": container with ID starting with ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5 not found: ID does not exist" containerID="ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.918128 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5"} err="failed to get container status \"ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5\": rpc error: code = NotFound desc = could not find container \"ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5\": container with ID starting with ed11a9832c740fe1153d3034c4f7cf016f7d940351cedee7031807271e7bb0f5 not found: ID does not exist"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.918221 4611 scope.go:117] "RemoveContainer" containerID="879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.918813 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b\": container with ID starting with 879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b not found: ID does not exist" containerID="879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.918926 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b"} err="failed to get container status \"879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b\": rpc error: code = NotFound desc = could not find container \"879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b\": container with ID starting with 879baed6982666546424268db32bd1a9583f909a41e0e9dc1f6e61c5f9609f9b not found: ID does not exist"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.918995 4611 scope.go:117] "RemoveContainer" containerID="f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.922988 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38\": container with ID starting with f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38 not found: ID does not exist" containerID="f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.923183 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38"} err="failed to get container status \"f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38\": rpc error: code = NotFound desc = could not find container \"f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38\": container with ID starting with f224705cce846ea62c4bb852c8f61e40badcbb57518f57513fce2cfc21aa8d38 not found: ID does not exist"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.923294 4611 scope.go:117] "RemoveContainer" containerID="af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e"
Sep 29 13:01:32 crc kubenswrapper[4611]: E0929 13:01:32.923648 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e\": container with ID starting with af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e not found: ID does not exist" containerID="af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.923746 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e"} err="failed to get container status \"af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e\": rpc error: code = NotFound desc = could not find container \"af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e\": container with ID starting with af3214f8b5864cab67b13ff91a019ed8efa49abf2a4a1b1ea2b05faaa4bac94e not found: ID does not exist"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.941444 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.941798 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-scripts\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.941927 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-log-httpd\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.942111 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-run-httpd\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.942300 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-config-data\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.942436 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:32 crc kubenswrapper[4611]: I0929 13:01:32.942545 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rscgc\" (UniqueName: \"kubernetes.io/projected/ffbec93d-02b4-4ac6-9f15-7267d9913543-kube-api-access-rscgc\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.044095 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-run-httpd\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.044568 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-config-data\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.044709 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.044758 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rscgc\" (UniqueName: \"kubernetes.io/projected/ffbec93d-02b4-4ac6-9f15-7267d9913543-kube-api-access-rscgc\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.044790 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.044943 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-run-httpd\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.045361 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-scripts\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.045429 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-log-httpd\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.045973 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-log-httpd\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.053560 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-scripts\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.054075 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.055825 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.064216 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-config-data\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.065002 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rscgc\" (UniqueName: \"kubernetes.io/projected/ffbec93d-02b4-4ac6-9f15-7267d9913543-kube-api-access-rscgc\") pod \"ceilometer-0\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.205274 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.671613 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.751752 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21ab812e-ceb0-4fe6-a585-06fea29d6b71" path="/var/lib/kubelet/pods/21ab812e-ceb0-4fe6-a585-06fea29d6b71/volumes"
Sep 29 13:01:33 crc kubenswrapper[4611]: I0929 13:01:33.781754 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerStarted","Data":"da6191a2bbeb35d1d391c248063083a95a02db7ad3be6f321040c27fd63ee132"}
Sep 29 13:01:34 crc kubenswrapper[4611]: I0929 13:01:34.790431 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerStarted","Data":"125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94"}
Sep 29 13:01:34 crc kubenswrapper[4611]: I0929 13:01:34.790976 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerStarted","Data":"e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b"}
Sep 29 13:01:35 crc kubenswrapper[4611]: I0929 13:01:35.809051 4611 generic.go:334] "Generic (PLEG): container finished" podID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerID="02e3749cae440178d08377420327e1a0040c2355a5bcef666f74ba88526fabe1" exitCode=137
Sep 29 13:01:35 crc kubenswrapper[4611]: I0929 13:01:35.809764 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerDied","Data":"02e3749cae440178d08377420327e1a0040c2355a5bcef666f74ba88526fabe1"}
Sep 29 13:01:35 crc kubenswrapper[4611]: I0929 13:01:35.816370 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerStarted","Data":"9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6"}
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.248339 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c9489c674-t4fp8"
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.417663 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a263e8a7-9837-4368-aa41-01bd60fabd6d-logs\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.417730 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-scripts\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.417770 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-secret-key\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.417812 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz8sk\" (UniqueName: \"kubernetes.io/projected/a263e8a7-9837-4368-aa41-01bd60fabd6d-kube-api-access-cz8sk\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.417844 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-combined-ca-bundle\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.417995 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-config-data\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.418039 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-tls-certs\") pod \"a263e8a7-9837-4368-aa41-01bd60fabd6d\" (UID: \"a263e8a7-9837-4368-aa41-01bd60fabd6d\") "
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.418289 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a263e8a7-9837-4368-aa41-01bd60fabd6d-logs" (OuterVolumeSpecName: "logs") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.418795 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a263e8a7-9837-4368-aa41-01bd60fabd6d-logs\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.442957 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.443367 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a263e8a7-9837-4368-aa41-01bd60fabd6d-kube-api-access-cz8sk" (OuterVolumeSpecName: "kube-api-access-cz8sk") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "kube-api-access-cz8sk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.469601 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-config-data" (OuterVolumeSpecName: "config-data") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.485292 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-scripts" (OuterVolumeSpecName: "scripts") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.504033 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.521721 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.521928 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a263e8a7-9837-4368-aa41-01bd60fabd6d-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.522022 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.522084 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz8sk\" (UniqueName: \"kubernetes.io/projected/a263e8a7-9837-4368-aa41-01bd60fabd6d-kube-api-access-cz8sk\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.522139 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.533306 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "a263e8a7-9837-4368-aa41-01bd60fabd6d" (UID: "a263e8a7-9837-4368-aa41-01bd60fabd6d"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.623804 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a263e8a7-9837-4368-aa41-01bd60fabd6d-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.828326 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c9489c674-t4fp8" event={"ID":"a263e8a7-9837-4368-aa41-01bd60fabd6d","Type":"ContainerDied","Data":"7ffd1240f4a44fa6f4c042cf9efdde3e3be729b73ab479c4ce989e68b21f341a"}
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.828585 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c9489c674-t4fp8"
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.828642 4611 scope.go:117] "RemoveContainer" containerID="c6aa15f2e771bb555ed0a0d2d6eb265e41f78d80a168a4a2e6d43155dae5b0ea"
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.831356 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerStarted","Data":"2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9"}
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.832205 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.859970 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9932235600000001 podStartE2EDuration="4.8599456s" podCreationTimestamp="2025-09-29 13:01:32 +0000 UTC" firstStartedPulling="2025-09-29 13:01:33.680883027 +0000 UTC m=+1280.572402643" lastFinishedPulling="2025-09-29 13:01:36.547605077 +0000 UTC m=+1283.439124683" observedRunningTime="2025-09-29 13:01:36.856045147 +0000 UTC m=+1283.747564763" watchObservedRunningTime="2025-09-29 13:01:36.8599456 +0000 UTC m=+1283.751465206"
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.880125 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c9489c674-t4fp8"]
Sep 29 13:01:36 crc kubenswrapper[4611]: I0929 13:01:36.891202 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5c9489c674-t4fp8"]
Sep 29 13:01:37 crc kubenswrapper[4611]: I0929 13:01:37.006698 4611 scope.go:117] "RemoveContainer" containerID="02e3749cae440178d08377420327e1a0040c2355a5bcef666f74ba88526fabe1"
Sep 29 13:01:37 crc kubenswrapper[4611]: I0929 13:01:37.747862 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" path="/var/lib/kubelet/pods/a263e8a7-9837-4368-aa41-01bd60fabd6d/volumes"
Sep 29 13:01:45 crc kubenswrapper[4611]: I0929 13:01:45.917387 4611 generic.go:334] "Generic (PLEG): container finished" podID="15aa0847-f436-4999-9622-92c588953523" containerID="028d5c67557afc2c896e97de914d5a106bec8c6d0751b728381a5a09c751105e" exitCode=0
Sep 29 13:01:45 crc kubenswrapper[4611]: I0929 13:01:45.917473 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-njndr" event={"ID":"15aa0847-f436-4999-9622-92c588953523","Type":"ContainerDied","Data":"028d5c67557afc2c896e97de914d5a106bec8c6d0751b728381a5a09c751105e"}
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.310069 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-njndr"
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.325899 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-combined-ca-bundle\") pod \"15aa0847-f436-4999-9622-92c588953523\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") "
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.326004 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-scripts\") pod \"15aa0847-f436-4999-9622-92c588953523\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") "
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.326096 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8c88\" (UniqueName: \"kubernetes.io/projected/15aa0847-f436-4999-9622-92c588953523-kube-api-access-z8c88\") pod \"15aa0847-f436-4999-9622-92c588953523\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") "
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.326144 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-config-data\") pod \"15aa0847-f436-4999-9622-92c588953523\" (UID: \"15aa0847-f436-4999-9622-92c588953523\") "
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.335349 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15aa0847-f436-4999-9622-92c588953523-kube-api-access-z8c88" (OuterVolumeSpecName: "kube-api-access-z8c88") pod "15aa0847-f436-4999-9622-92c588953523" (UID: "15aa0847-f436-4999-9622-92c588953523"). InnerVolumeSpecName "kube-api-access-z8c88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.344914 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-scripts" (OuterVolumeSpecName: "scripts") pod "15aa0847-f436-4999-9622-92c588953523" (UID: "15aa0847-f436-4999-9622-92c588953523"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.359556 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-config-data" (OuterVolumeSpecName: "config-data") pod "15aa0847-f436-4999-9622-92c588953523" (UID: "15aa0847-f436-4999-9622-92c588953523"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.379398 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15aa0847-f436-4999-9622-92c588953523" (UID: "15aa0847-f436-4999-9622-92c588953523"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.428012 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.428050 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8c88\" (UniqueName: \"kubernetes.io/projected/15aa0847-f436-4999-9622-92c588953523-kube-api-access-z8c88\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.428063 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.428072 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15aa0847-f436-4999-9622-92c588953523-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.937359 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-njndr" event={"ID":"15aa0847-f436-4999-9622-92c588953523","Type":"ContainerDied","Data":"f6a4baa3a88bc50fbf51c2ac835aee91b496d4c013ffd49d38ef6b765f6f5256"}
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.937403 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6a4baa3a88bc50fbf51c2ac835aee91b496d4c013ffd49d38ef6b765f6f5256"
Sep 29 13:01:47 crc kubenswrapper[4611]: I0929 13:01:47.937456 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-njndr"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.052832 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 29 13:01:48 crc kubenswrapper[4611]: E0929 13:01:48.053336 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053354 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon"
Sep 29 13:01:48 crc kubenswrapper[4611]: E0929 13:01:48.053370 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053377 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon"
Sep 29 13:01:48 crc kubenswrapper[4611]: E0929 13:01:48.053391 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15aa0847-f436-4999-9622-92c588953523" containerName="nova-cell0-conductor-db-sync"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053399 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="15aa0847-f436-4999-9622-92c588953523" containerName="nova-cell0-conductor-db-sync"
Sep 29 13:01:48 crc kubenswrapper[4611]: E0929 13:01:48.053417 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon-log"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053426 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon-log"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053666 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053681 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053694 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="15aa0847-f436-4999-9622-92c588953523" containerName="nova-cell0-conductor-db-sync"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.053708 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a263e8a7-9837-4368-aa41-01bd60fabd6d" containerName="horizon-log"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.054475 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.108353 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.110285 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7l44j"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.140978 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.245233 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed4e5355-c0d7-4a82-ad50-07e42cd38045-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.245307 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc7dl\" (UniqueName: \"kubernetes.io/projected/ed4e5355-c0d7-4a82-ad50-07e42cd38045-kube-api-access-pc7dl\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.245363 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed4e5355-c0d7-4a82-ad50-07e42cd38045-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.347654 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed4e5355-c0d7-4a82-ad50-07e42cd38045-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.348507 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc7dl\" (UniqueName: \"kubernetes.io/projected/ed4e5355-c0d7-4a82-ad50-07e42cd38045-kube-api-access-pc7dl\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.348707 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed4e5355-c0d7-4a82-ad50-07e42cd38045-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.352642 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed4e5355-c0d7-4a82-ad50-07e42cd38045-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.367474 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed4e5355-c0d7-4a82-ad50-07e42cd38045-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.373763 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc7dl\" (UniqueName: \"kubernetes.io/projected/ed4e5355-c0d7-4a82-ad50-07e42cd38045-kube-api-access-pc7dl\") pod \"nova-cell0-conductor-0\" (UID: \"ed4e5355-c0d7-4a82-ad50-07e42cd38045\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.419390 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.865388 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 29 13:01:48 crc kubenswrapper[4611]: I0929 13:01:48.948929 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ed4e5355-c0d7-4a82-ad50-07e42cd38045","Type":"ContainerStarted","Data":"f4b2657bd892a8ead6dac8ee0654ef8777009596c977cf9aa3f48207b48fbd7e"}
Sep 29 13:01:49 crc kubenswrapper[4611]: I0929 13:01:49.959012 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ed4e5355-c0d7-4a82-ad50-07e42cd38045","Type":"ContainerStarted","Data":"e391681df497a418cf9c43fc7efd3f9607de005a6b19a78800fa6d0fcdab934e"}
Sep 29 13:01:49 crc kubenswrapper[4611]: I0929 13:01:49.959387 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:49 crc kubenswrapper[4611]: I0929 13:01:49.977489 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.977467184 podStartE2EDuration="1.977467184s" podCreationTimestamp="2025-09-29 13:01:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:01:49.974790166 +0000 UTC m=+1296.866309792" watchObservedRunningTime="2025-09-29 13:01:49.977467184 +0000 UTC m=+1296.868986790"
Sep 29 13:01:58 crc kubenswrapper[4611]: I0929 13:01:58.449842 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Sep 29 13:01:58 crc kubenswrapper[4611]: I0929 13:01:58.993886 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-nlhnf"]
Sep 29 13:01:58 crc kubenswrapper[4611]: I0929 13:01:58.995026 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:58 crc kubenswrapper[4611]: I0929 13:01:58.997609 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Sep 29 13:01:58 crc kubenswrapper[4611]: I0929 13:01:58.997978 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.012834 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-nlhnf"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.155571 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-config-data\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.155692 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-scripts\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.155900 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nqfr\" (UniqueName: \"kubernetes.io/projected/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-kube-api-access-8nqfr\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.155956 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.214473 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.220386 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.225320 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.253030 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.257502 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nqfr\" (UniqueName: \"kubernetes.io/projected/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-kube-api-access-8nqfr\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.257578 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.257606 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-config-data\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.258264 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-scripts\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.299765 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nqfr\" (UniqueName: \"kubernetes.io/projected/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-kube-api-access-8nqfr\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.304186 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-scripts\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.305769 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.312059 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-config-data\") pod \"nova-cell0-cell-mapping-nlhnf\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.313663 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nlhnf"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.345644 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.353475 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.354441 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.359682 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq7vm\" (UniqueName: \"kubernetes.io/projected/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-kube-api-access-fq7vm\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.359820 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-config-data\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.359872 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.374602 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.461964 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-config-data\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.462008 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.462049 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.462106 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.462127 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2p8s\" (UniqueName: \"kubernetes.io/projected/3a34b746-a2fc-403d-9104-58aef93a7154-kube-api-access-m2p8s\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.462151 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq7vm\" (UniqueName: \"kubernetes.io/projected/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-kube-api-access-fq7vm\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.476251 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.494219 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-config-data\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.510679 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.512391 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.516204 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.531250 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq7vm\" (UniqueName: \"kubernetes.io/projected/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-kube-api-access-fq7vm\") pod \"nova-scheduler-0\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " pod="openstack/nova-scheduler-0"
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.582467 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.701052 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702163 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702285 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702382 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702440 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e289b1-fa23-47f5-bd9b-c873abc229e6-logs\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702486 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6tc8\" (UniqueName: \"kubernetes.io/projected/24e289b1-fa23-47f5-bd9b-c873abc229e6-kube-api-access-w6tc8\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702518 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.702549 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2p8s\" (UniqueName: \"kubernetes.io/projected/3a34b746-a2fc-403d-9104-58aef93a7154-kube-api-access-m2p8s\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.722511 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.773405 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.787433 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-m2p8s\" (UniqueName: \"kubernetes.io/projected/3a34b746-a2fc-403d-9104-58aef93a7154-kube-api-access-m2p8s\") pod \"nova-cell1-novncproxy-0\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.825444 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.825499 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e289b1-fa23-47f5-bd9b-c873abc229e6-logs\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.825526 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6tc8\" (UniqueName: \"kubernetes.io/projected/24e289b1-fa23-47f5-bd9b-c873abc229e6-kube-api-access-w6tc8\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.825566 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.831113 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e289b1-fa23-47f5-bd9b-c873abc229e6-logs\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.855272 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.880313 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.882372 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.889939 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.900443 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6tc8\" (UniqueName: \"kubernetes.io/projected/24e289b1-fa23-47f5-bd9b-c873abc229e6-kube-api-access-w6tc8\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.906247 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.916204 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b9876949c-pzwtc"] Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.927568 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.940136 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b9876949c-pzwtc"] Sep 29 13:01:59 crc kubenswrapper[4611]: I0929 13:01:59.949392 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data\") pod \"nova-api-0\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " pod="openstack/nova-api-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033208 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-config-data\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033270 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shdwx\" (UniqueName: \"kubernetes.io/projected/30738414-5a70-41ef-a024-9494359b8b2e-kube-api-access-shdwx\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033776 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-svc\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033812 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033847 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-swift-storage-0\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " 
pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033910 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-config\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033936 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033961 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033977 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a21427-2aab-42ae-bed8-b94a1e0997b5-logs\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.033993 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg6qb\" (UniqueName: \"kubernetes.io/projected/a1a21427-2aab-42ae-bed8-b94a1e0997b5-kube-api-access-fg6qb\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.066581 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135168 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-config\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135212 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135235 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135249 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a21427-2aab-42ae-bed8-b94a1e0997b5-logs\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135265 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg6qb\" (UniqueName: \"kubernetes.io/projected/a1a21427-2aab-42ae-bed8-b94a1e0997b5-kube-api-access-fg6qb\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135313 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-config-data\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135339 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shdwx\" (UniqueName: \"kubernetes.io/projected/30738414-5a70-41ef-a024-9494359b8b2e-kube-api-access-shdwx\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135376 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-svc\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135393 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.135449 4611 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-swift-storage-0\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.136235 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-swift-storage-0\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.137116 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-config\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.137760 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.138413 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a21427-2aab-42ae-bed8-b94a1e0997b5-logs\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.139243 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-svc\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.139909 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.142612 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-config-data\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.144574 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.159246 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg6qb\" (UniqueName: \"kubernetes.io/projected/a1a21427-2aab-42ae-bed8-b94a1e0997b5-kube-api-access-fg6qb\") pod \"nova-metadata-0\" (UID: 
\"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.162083 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shdwx\" (UniqueName: \"kubernetes.io/projected/30738414-5a70-41ef-a024-9494359b8b2e-kube-api-access-shdwx\") pod \"dnsmasq-dns-5b9876949c-pzwtc\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.193175 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.230091 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.268815 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.415275 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-nlhnf"] Sep 29 13:02:00 crc kubenswrapper[4611]: W0929 13:02:00.539131 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7ea831d_4481_47e7_a96b_f3cee6f1d00c.slice/crio-00f4f7bd6f2366d438c4c0a6036369cc0b64d759f49bd9aba79f00006095b693 WatchSource:0}: Error finding container 00f4f7bd6f2366d438c4c0a6036369cc0b64d759f49bd9aba79f00006095b693: Status 404 returned error can't find the container with id 00f4f7bd6f2366d438c4c0a6036369cc0b64d759f49bd9aba79f00006095b693 Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.565028 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:00 crc kubenswrapper[4611]: W0929 13:02:00.581508 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc814bc12_1c7b_4d7b_a477_dd4407d3b1b7.slice/crio-35c7ace4e87535224c59a9eb84d9f4257f78cbfe84afaad56ea0c189f4117dff WatchSource:0}: Error finding container 35c7ace4e87535224c59a9eb84d9f4257f78cbfe84afaad56ea0c189f4117dff: Status 404 returned error can't find the container with id 35c7ace4e87535224c59a9eb84d9f4257f78cbfe84afaad56ea0c189f4117dff Sep 29 13:02:00 crc kubenswrapper[4611]: I0929 13:02:00.706950 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.081469 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nlhnf" event={"ID":"a7ea831d-4481-47e7-a96b-f3cee6f1d00c","Type":"ContainerStarted","Data":"00f4f7bd6f2366d438c4c0a6036369cc0b64d759f49bd9aba79f00006095b693"} Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.083150 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3a34b746-a2fc-403d-9104-58aef93a7154","Type":"ContainerStarted","Data":"05e71df061c1b1f31df631eff82a33fcb9ef87cfcef48677b2924888cbc9e3f3"} Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.085009 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fdss8"] Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.086247 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.091060 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.091447 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.092080 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7","Type":"ContainerStarted","Data":"35c7ace4e87535224c59a9eb84d9f4257f78cbfe84afaad56ea0c189f4117dff"} Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.111108 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fdss8"] Sep 29 13:02:01 crc kubenswrapper[4611]: W0929 13:02:01.183874 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1a21427_2aab_42ae_bed8_b94a1e0997b5.slice/crio-2d9a141a711e4d31b8f2ce2e69063758f69853e1979dc20f52aaf459ba5afd34 WatchSource:0}: Error finding container 2d9a141a711e4d31b8f2ce2e69063758f69853e1979dc20f52aaf459ba5afd34: Status 404 returned error can't find the container with id 2d9a141a711e4d31b8f2ce2e69063758f69853e1979dc20f52aaf459ba5afd34 Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.191192 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.202452 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-config-data\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.202532 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-975l2\" (UniqueName: \"kubernetes.io/projected/30289ed8-4668-459a-9e89-698bea27c2f0-kube-api-access-975l2\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.202557 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.202613 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-scripts\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.301811 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.303668 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-scripts\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.303792 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-config-data\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.303832 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-975l2\" (UniqueName: \"kubernetes.io/projected/30289ed8-4668-459a-9e89-698bea27c2f0-kube-api-access-975l2\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.303852 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.310654 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.319561 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-config-data\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.331174 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-scripts\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.379119 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b9876949c-pzwtc"] Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.387292 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-975l2\" (UniqueName: \"kubernetes.io/projected/30289ed8-4668-459a-9e89-698bea27c2f0-kube-api-access-975l2\") pod \"nova-cell1-conductor-db-sync-fdss8\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.471107 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:01 crc kubenswrapper[4611]: I0929 13:02:01.998250 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fdss8"] Sep 29 13:02:02 crc kubenswrapper[4611]: W0929 13:02:02.014271 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30289ed8_4668_459a_9e89_698bea27c2f0.slice/crio-03b45725e014e42e8703bae579d7b1db2d96ff598f42f86916ca0457fd949a1a WatchSource:0}: Error finding container 03b45725e014e42e8703bae579d7b1db2d96ff598f42f86916ca0457fd949a1a: Status 404 returned error can't find the container with id 03b45725e014e42e8703bae579d7b1db2d96ff598f42f86916ca0457fd949a1a Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.121754 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fdss8" event={"ID":"30289ed8-4668-459a-9e89-698bea27c2f0","Type":"ContainerStarted","Data":"03b45725e014e42e8703bae579d7b1db2d96ff598f42f86916ca0457fd949a1a"} Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.123946 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nlhnf" event={"ID":"a7ea831d-4481-47e7-a96b-f3cee6f1d00c","Type":"ContainerStarted","Data":"50120a6644c4bbfbbd2b699e457f564c98197c6cfa7ffb3e367585d239e3f6e9"} Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.142359 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-nlhnf" podStartSLOduration=4.14233724 podStartE2EDuration="4.14233724s" podCreationTimestamp="2025-09-29 13:01:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:02.140465306 +0000 UTC m=+1309.031984912" watchObservedRunningTime="2025-09-29 13:02:02.14233724 +0000 UTC m=+1309.033856846" Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.150971 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"24e289b1-fa23-47f5-bd9b-c873abc229e6","Type":"ContainerStarted","Data":"a9296d2ec3232070ec778f93cb6abcd4f563e2e633f9432a65754a68d4ae69d2"} Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.162239 4611 generic.go:334] "Generic (PLEG): container finished" podID="30738414-5a70-41ef-a024-9494359b8b2e" containerID="1be7cd241a53244bc8b74954c63fdaaff22816917b9f04c159940621e8878be1" exitCode=0 Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.162360 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" event={"ID":"30738414-5a70-41ef-a024-9494359b8b2e","Type":"ContainerDied","Data":"1be7cd241a53244bc8b74954c63fdaaff22816917b9f04c159940621e8878be1"} Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.162391 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" event={"ID":"30738414-5a70-41ef-a024-9494359b8b2e","Type":"ContainerStarted","Data":"b412ba6b0d1a3bdcb7b0964d063799b9ca2ea8b01bd222c028bc517e161bebea"} Sep 29 13:02:02 crc kubenswrapper[4611]: I0929 13:02:02.173724 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a1a21427-2aab-42ae-bed8-b94a1e0997b5","Type":"ContainerStarted","Data":"2d9a141a711e4d31b8f2ce2e69063758f69853e1979dc20f52aaf459ba5afd34"} Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.184749 4611 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fdss8" event={"ID":"30289ed8-4668-459a-9e89-698bea27c2f0","Type":"ContainerStarted","Data":"3b50f3e4a3e20ff1a194c72479ba28e67f37ed5ae96e7f2a964ba9e8ef5e2643"} Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.192285 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" event={"ID":"30738414-5a70-41ef-a024-9494359b8b2e","Type":"ContainerStarted","Data":"123dbe6cc20a8552bd1d2dd780fada2fee7a5f8d7b11085a1a604c577869e040"} Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.192737 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.238051 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-fdss8" podStartSLOduration=2.238031986 podStartE2EDuration="2.238031986s" podCreationTimestamp="2025-09-29 13:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:03.213581009 +0000 UTC m=+1310.105100615" watchObservedRunningTime="2025-09-29 13:02:03.238031986 +0000 UTC m=+1310.129551592" Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.259389 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.303212 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" podStartSLOduration=4.30318501 podStartE2EDuration="4.30318501s" podCreationTimestamp="2025-09-29 13:01:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:03.236466301 +0000 UTC m=+1310.127985937" watchObservedRunningTime="2025-09-29 13:02:03.30318501 +0000 UTC m=+1310.194704626" Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.404327 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:03 crc kubenswrapper[4611]: I0929 13:02:03.424255 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:07 crc kubenswrapper[4611]: I0929 13:02:07.774979 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 13:02:07 crc kubenswrapper[4611]: I0929 13:02:07.776836 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b739cb68-afe4-46e3-912c-318498feeb54" containerName="kube-state-metrics" containerID="cri-o://f76e49e07d3dc77213c48fa8bef58f0164e66dbb0757d08bded4e27a0d75fbf6" gracePeriod=30 Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.253115 4611 generic.go:334] "Generic (PLEG): container finished" podID="b739cb68-afe4-46e3-912c-318498feeb54" containerID="f76e49e07d3dc77213c48fa8bef58f0164e66dbb0757d08bded4e27a0d75fbf6" exitCode=2 Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.253192 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b739cb68-afe4-46e3-912c-318498feeb54","Type":"ContainerDied","Data":"f76e49e07d3dc77213c48fa8bef58f0164e66dbb0757d08bded4e27a0d75fbf6"} Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.880585 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.881130 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-central-agent" containerID="cri-o://e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b" gracePeriod=30 Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.881234 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="sg-core" containerID="cri-o://9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6" gracePeriod=30 Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.881380 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="proxy-httpd" containerID="cri-o://2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9" gracePeriod=30 Sep 29 13:02:09 crc kubenswrapper[4611]: I0929 13:02:09.881250 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-notification-agent" containerID="cri-o://125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94" gracePeriod=30 Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.266405 4611 generic.go:334] "Generic (PLEG): container finished" podID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerID="2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9" exitCode=0 Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.266440 4611 generic.go:334] "Generic (PLEG): container finished" podID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerID="9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6" exitCode=2 Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.266451 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerDied","Data":"2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9"} Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.266521 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerDied","Data":"9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6"} Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.270283 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.342003 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c7b5795c-4r8z6"] Sep 29 13:02:10 crc kubenswrapper[4611]: I0929 13:02:10.342251 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerName="dnsmasq-dns" containerID="cri-o://6e001040663132fa6d98e1547c84d37b6fca049464c8bab2177e1d60f78b2508" gracePeriod=10 Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.139767 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.209774 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9xx7\" (UniqueName: \"kubernetes.io/projected/b739cb68-afe4-46e3-912c-318498feeb54-kube-api-access-g9xx7\") pod \"b739cb68-afe4-46e3-912c-318498feeb54\" (UID: \"b739cb68-afe4-46e3-912c-318498feeb54\") " Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.217398 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b739cb68-afe4-46e3-912c-318498feeb54-kube-api-access-g9xx7" (OuterVolumeSpecName: "kube-api-access-g9xx7") pod "b739cb68-afe4-46e3-912c-318498feeb54" (UID: "b739cb68-afe4-46e3-912c-318498feeb54"). InnerVolumeSpecName "kube-api-access-g9xx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.281958 4611 generic.go:334] "Generic (PLEG): container finished" podID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerID="e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b" exitCode=0 Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.282027 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerDied","Data":"e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b"} Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.285152 4611 generic.go:334] "Generic (PLEG): container finished" podID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerID="6e001040663132fa6d98e1547c84d37b6fca049464c8bab2177e1d60f78b2508" exitCode=0 Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.285198 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" event={"ID":"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2","Type":"ContainerDied","Data":"6e001040663132fa6d98e1547c84d37b6fca049464c8bab2177e1d60f78b2508"} Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.286566 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b739cb68-afe4-46e3-912c-318498feeb54","Type":"ContainerDied","Data":"7a70be62101e122e7cdb906015a35ed0d3a67c26b1110c88078681c83da6a1e9"} Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.286594 4611 scope.go:117] "RemoveContainer" containerID="f76e49e07d3dc77213c48fa8bef58f0164e66dbb0757d08bded4e27a0d75fbf6" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.286710 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.316894 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9xx7\" (UniqueName: \"kubernetes.io/projected/b739cb68-afe4-46e3-912c-318498feeb54-kube-api-access-g9xx7\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.435825 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.454985 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.477875 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 13:02:11 crc kubenswrapper[4611]: E0929 13:02:11.478357 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b739cb68-afe4-46e3-912c-318498feeb54" containerName="kube-state-metrics" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.478372 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b739cb68-afe4-46e3-912c-318498feeb54" containerName="kube-state-metrics" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.478645 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b739cb68-afe4-46e3-912c-318498feeb54" containerName="kube-state-metrics" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.479441 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.486171 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.486171 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.528938 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.532312 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.532377 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zc6t\" (UniqueName: \"kubernetes.io/projected/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-api-access-8zc6t\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.532749 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.532822 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: 
\"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.634186 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.634245 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.634385 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.634415 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zc6t\" (UniqueName: \"kubernetes.io/projected/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-api-access-8zc6t\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.640524 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.641313 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.641313 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.665105 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zc6t\" (UniqueName: \"kubernetes.io/projected/8a21947f-1b2a-4360-824a-b05cb82ed84d-kube-api-access-8zc6t\") pod \"kube-state-metrics-0\" (UID: \"8a21947f-1b2a-4360-824a-b05cb82ed84d\") " pod="openstack/kube-state-metrics-0" Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.751312 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b739cb68-afe4-46e3-912c-318498feeb54" path="/var/lib/kubelet/pods/b739cb68-afe4-46e3-912c-318498feeb54/volumes" 
Sep 29 13:02:11 crc kubenswrapper[4611]: I0929 13:02:11.844604 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.313845 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" event={"ID":"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2","Type":"ContainerDied","Data":"44a6e699b19f95f403399ba0ba9d46a51baa6d4e878b0cd1c1817ea5a4d91401"}
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.314215 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44a6e699b19f95f403399ba0ba9d46a51baa6d4e878b0cd1c1817ea5a4d91401"
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.318298 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a1a21427-2aab-42ae-bed8-b94a1e0997b5","Type":"ContainerStarted","Data":"7fb28917d3e5a3c4947dd9bf8a1956062e0e3417fe08106da6f9fed02de19f52"}
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.319675 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"24e289b1-fa23-47f5-bd9b-c873abc229e6","Type":"ContainerStarted","Data":"ca81bbc06d3005290aa476272a474d130aa0e39c1b583d53b7dd7d1680669400"}
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.320917 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7","Type":"ContainerStarted","Data":"2069b70d45403179684441ab6164c166e9f8c680d13da9871c613b079ca9e897"}
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.377569 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6"
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.419242 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.558802382 podStartE2EDuration="13.419218798s" podCreationTimestamp="2025-09-29 13:01:59 +0000 UTC" firstStartedPulling="2025-09-29 13:02:00.60630569 +0000 UTC m=+1307.497825296" lastFinishedPulling="2025-09-29 13:02:10.466722106 +0000 UTC m=+1317.358241712" observedRunningTime="2025-09-29 13:02:12.341056488 +0000 UTC m=+1319.232576104" watchObservedRunningTime="2025-09-29 13:02:12.419218798 +0000 UTC m=+1319.310738404"
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.508822 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-config\") pod \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") "
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.508891 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-svc\") pod \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") "
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.508953 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-nb\") pod \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") "
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.509064 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cddkz\" (UniqueName: \"kubernetes.io/projected/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-kube-api-access-cddkz\") pod \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") "
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.509092 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-sb\") pod \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") "
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.509200 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-swift-storage-0\") pod \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\" (UID: \"e3613ed9-dc47-4f8b-b962-2ab7005bfeb2\") "
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.576071 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-kube-api-access-cddkz" (OuterVolumeSpecName: "kube-api-access-cddkz") pod "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" (UID: "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2"). InnerVolumeSpecName "kube-api-access-cddkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.616575 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cddkz\" (UniqueName: \"kubernetes.io/projected/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-kube-api-access-cddkz\") on node \"crc\" DevicePath \"\""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.768315 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" (UID: "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.786603 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-config" (OuterVolumeSpecName: "config") pod "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" (UID: "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.803253 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" (UID: "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.814953 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" (UID: "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.824160 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-config\") on node \"crc\" DevicePath \"\""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.824199 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.824235 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.824248 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.829415 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" (UID: "e3613ed9-dc47-4f8b-b962-2ab7005bfeb2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.868570 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 13:02:12 crc kubenswrapper[4611]: I0929 13:02:12.926478 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.347468 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3a34b746-a2fc-403d-9104-58aef93a7154","Type":"ContainerStarted","Data":"631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1"}
Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.347539 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="3a34b746-a2fc-403d-9104-58aef93a7154" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1" gracePeriod=30
Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.350479 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8a21947f-1b2a-4360-824a-b05cb82ed84d","Type":"ContainerStarted","Data":"559dc748c0282d9b32841ded31f4911abe3dc0bd6f9ed4c335d979ac1a2b2d74"}
Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.352053 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a1a21427-2aab-42ae-bed8-b94a1e0997b5","Type":"ContainerStarted","Data":"c2893b0b9f01955e7eb07304ac957b68829a00734e2f99d2e7e478f0716bab4d"}
Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.352196 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-log"
containerID="cri-o://7fb28917d3e5a3c4947dd9bf8a1956062e0e3417fe08106da6f9fed02de19f52" gracePeriod=30 Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.352397 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-metadata" containerID="cri-o://c2893b0b9f01955e7eb07304ac957b68829a00734e2f99d2e7e478f0716bab4d" gracePeriod=30 Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.358783 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"24e289b1-fa23-47f5-bd9b-c873abc229e6","Type":"ContainerStarted","Data":"18152e06beec7f0e17c71a62b98a91902c754428c84db34ca5b590634ec19267"} Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.358852 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c7b5795c-4r8z6" Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.375062 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.859976515 podStartE2EDuration="14.375037229s" podCreationTimestamp="2025-09-29 13:01:59 +0000 UTC" firstStartedPulling="2025-09-29 13:02:00.716669952 +0000 UTC m=+1307.608189558" lastFinishedPulling="2025-09-29 13:02:12.231730666 +0000 UTC m=+1319.123250272" observedRunningTime="2025-09-29 13:02:13.366531473 +0000 UTC m=+1320.258051079" watchObservedRunningTime="2025-09-29 13:02:13.375037229 +0000 UTC m=+1320.266556835" Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.399941 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.648045703 podStartE2EDuration="14.399924279s" podCreationTimestamp="2025-09-29 13:01:59 +0000 UTC" firstStartedPulling="2025-09-29 13:02:01.322921354 +0000 UTC m=+1308.214440960" lastFinishedPulling="2025-09-29 13:02:11.07479993 +0000 UTC m=+1317.966319536" observedRunningTime="2025-09-29 13:02:13.397750206 +0000 UTC m=+1320.289269812" watchObservedRunningTime="2025-09-29 13:02:13.399924279 +0000 UTC m=+1320.291443885" Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.424199 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.555756494 podStartE2EDuration="14.42418268s" podCreationTimestamp="2025-09-29 13:01:59 +0000 UTC" firstStartedPulling="2025-09-29 13:02:01.202853102 +0000 UTC m=+1308.094372708" lastFinishedPulling="2025-09-29 13:02:11.071279288 +0000 UTC m=+1317.962798894" observedRunningTime="2025-09-29 13:02:13.421299197 +0000 UTC m=+1320.312818823" watchObservedRunningTime="2025-09-29 13:02:13.42418268 +0000 UTC m=+1320.315702286" Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.449846 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c7b5795c-4r8z6"] Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.458495 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65c7b5795c-4r8z6"] Sep 29 13:02:13 crc kubenswrapper[4611]: I0929 13:02:13.751392 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" path="/var/lib/kubelet/pods/e3613ed9-dc47-4f8b-b962-2ab7005bfeb2/volumes" Sep 29 13:02:14 crc kubenswrapper[4611]: I0929 13:02:14.370462 4611 generic.go:334] "Generic (PLEG): container finished" podID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" 
containerID="c2893b0b9f01955e7eb07304ac957b68829a00734e2f99d2e7e478f0716bab4d" exitCode=0 Sep 29 13:02:14 crc kubenswrapper[4611]: I0929 13:02:14.370795 4611 generic.go:334] "Generic (PLEG): container finished" podID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerID="7fb28917d3e5a3c4947dd9bf8a1956062e0e3417fe08106da6f9fed02de19f52" exitCode=143 Sep 29 13:02:14 crc kubenswrapper[4611]: I0929 13:02:14.370533 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a1a21427-2aab-42ae-bed8-b94a1e0997b5","Type":"ContainerDied","Data":"c2893b0b9f01955e7eb07304ac957b68829a00734e2f99d2e7e478f0716bab4d"} Sep 29 13:02:14 crc kubenswrapper[4611]: I0929 13:02:14.370892 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a1a21427-2aab-42ae-bed8-b94a1e0997b5","Type":"ContainerDied","Data":"7fb28917d3e5a3c4947dd9bf8a1956062e0e3417fe08106da6f9fed02de19f52"} Sep 29 13:02:14 crc kubenswrapper[4611]: I0929 13:02:14.704691 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.070857 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.230669 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.230918 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.381445 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a1a21427-2aab-42ae-bed8-b94a1e0997b5","Type":"ContainerDied","Data":"2d9a141a711e4d31b8f2ce2e69063758f69853e1979dc20f52aaf459ba5afd34"} Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.382294 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d9a141a711e4d31b8f2ce2e69063758f69853e1979dc20f52aaf459ba5afd34" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.414526 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.486272 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-config-data\") pod \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.486366 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a21427-2aab-42ae-bed8-b94a1e0997b5-logs\") pod \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.486471 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-combined-ca-bundle\") pod \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.487938 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg6qb\" (UniqueName: \"kubernetes.io/projected/a1a21427-2aab-42ae-bed8-b94a1e0997b5-kube-api-access-fg6qb\") pod \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\" (UID: \"a1a21427-2aab-42ae-bed8-b94a1e0997b5\") " Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.489903 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1a21427-2aab-42ae-bed8-b94a1e0997b5-logs" (OuterVolumeSpecName: "logs") pod "a1a21427-2aab-42ae-bed8-b94a1e0997b5" (UID: "a1a21427-2aab-42ae-bed8-b94a1e0997b5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.508503 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1a21427-2aab-42ae-bed8-b94a1e0997b5-kube-api-access-fg6qb" (OuterVolumeSpecName: "kube-api-access-fg6qb") pod "a1a21427-2aab-42ae-bed8-b94a1e0997b5" (UID: "a1a21427-2aab-42ae-bed8-b94a1e0997b5"). InnerVolumeSpecName "kube-api-access-fg6qb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.533128 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1a21427-2aab-42ae-bed8-b94a1e0997b5" (UID: "a1a21427-2aab-42ae-bed8-b94a1e0997b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.536196 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-config-data" (OuterVolumeSpecName: "config-data") pod "a1a21427-2aab-42ae-bed8-b94a1e0997b5" (UID: "a1a21427-2aab-42ae-bed8-b94a1e0997b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.590716 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg6qb\" (UniqueName: \"kubernetes.io/projected/a1a21427-2aab-42ae-bed8-b94a1e0997b5-kube-api-access-fg6qb\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.591235 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.591341 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1a21427-2aab-42ae-bed8-b94a1e0997b5-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:15 crc kubenswrapper[4611]: I0929 13:02:15.591447 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a21427-2aab-42ae-bed8-b94a1e0997b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.395873 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.396846 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8a21947f-1b2a-4360-824a-b05cb82ed84d","Type":"ContainerStarted","Data":"bd21f064ff83cd4d391c751265551870d20e61624547903f633eb9c95494487e"} Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.397058 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.422541 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.141629928 podStartE2EDuration="5.422522097s" podCreationTimestamp="2025-09-29 13:02:11 +0000 UTC" firstStartedPulling="2025-09-29 13:02:12.871048404 +0000 UTC m=+1319.762568010" lastFinishedPulling="2025-09-29 13:02:15.151940563 +0000 UTC m=+1322.043460179" observedRunningTime="2025-09-29 13:02:16.422131795 +0000 UTC m=+1323.313651401" watchObservedRunningTime="2025-09-29 13:02:16.422522097 +0000 UTC m=+1323.314041703" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.445352 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.456969 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.490527 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:16 crc kubenswrapper[4611]: E0929 13:02:16.491255 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerName="dnsmasq-dns" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.491337 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerName="dnsmasq-dns" Sep 29 13:02:16 crc kubenswrapper[4611]: E0929 13:02:16.491409 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-log" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.491466 4611 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-log" Sep 29 13:02:16 crc kubenswrapper[4611]: E0929 13:02:16.491520 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerName="init" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.491583 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerName="init" Sep 29 13:02:16 crc kubenswrapper[4611]: E0929 13:02:16.491677 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-metadata" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.491765 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-metadata" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.492025 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3613ed9-dc47-4f8b-b962-2ab7005bfeb2" containerName="dnsmasq-dns" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.492112 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-metadata" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.492229 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" containerName="nova-metadata-log" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.493398 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.499025 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.499690 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.509960 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.609021 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-config-data\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.609404 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:16 crc kubenswrapper[4611]: I0929 13:02:16.609523 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.609956 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-logs\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.610075 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss7bq\" (UniqueName: \"kubernetes.io/projected/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-kube-api-access-ss7bq\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.711714 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-config-data\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.711846 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.711867 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.712564 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-logs\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.712646 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss7bq\" (UniqueName: \"kubernetes.io/projected/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-kube-api-access-ss7bq\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.713080 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-logs\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.716596 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-config-data\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.720723 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.727916 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.735111 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss7bq\" (UniqueName: \"kubernetes.io/projected/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-kube-api-access-ss7bq\") pod \"nova-metadata-0\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:16.839223 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.748380 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1a21427-2aab-42ae-bed8-b94a1e0997b5" path="/var/lib/kubelet/pods/a1a21427-2aab-42ae-bed8-b94a1e0997b5/volumes" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.895375 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945527 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-log-httpd\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945590 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-scripts\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945690 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-config-data\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945769 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rscgc\" (UniqueName: \"kubernetes.io/projected/ffbec93d-02b4-4ac6-9f15-7267d9913543-kube-api-access-rscgc\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945809 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-combined-ca-bundle\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945840 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-run-httpd\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.945865 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-sg-core-conf-yaml\") pod \"ffbec93d-02b4-4ac6-9f15-7267d9913543\" (UID: \"ffbec93d-02b4-4ac6-9f15-7267d9913543\") " Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.946299 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.946540 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.947482 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.965474 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffbec93d-02b4-4ac6-9f15-7267d9913543-kube-api-access-rscgc" (OuterVolumeSpecName: "kube-api-access-rscgc") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "kube-api-access-rscgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.975273 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-scripts" (OuterVolumeSpecName: "scripts") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:17 crc kubenswrapper[4611]: I0929 13:02:17.995644 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:18 crc kubenswrapper[4611]: W0929 13:02:18.016000 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae643ac0_acf8_4fe8_add7_ed326a6db7d2.slice/crio-ad51351d176333ccd6061347e0d0ec06255d36afda6dba6f514ad1afb69328a4 WatchSource:0}: Error finding container ad51351d176333ccd6061347e0d0ec06255d36afda6dba6f514ad1afb69328a4: Status 404 returned error can't find the container with id ad51351d176333ccd6061347e0d0ec06255d36afda6dba6f514ad1afb69328a4 Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.054885 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.061839 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ffbec93d-02b4-4ac6-9f15-7267d9913543-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.061883 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.061898 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.061910 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rscgc\" (UniqueName: \"kubernetes.io/projected/ffbec93d-02b4-4ac6-9f15-7267d9913543-kube-api-access-rscgc\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.119762 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.164312 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.178141 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-config-data" (OuterVolumeSpecName: "config-data") pod "ffbec93d-02b4-4ac6-9f15-7267d9913543" (UID: "ffbec93d-02b4-4ac6-9f15-7267d9913543"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.266342 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffbec93d-02b4-4ac6-9f15-7267d9913543-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.421115 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae643ac0-acf8-4fe8-add7-ed326a6db7d2","Type":"ContainerStarted","Data":"36d853bf30b4296e194c1046707f703715295919637e23e0255f00542a19c72e"} Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.421478 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae643ac0-acf8-4fe8-add7-ed326a6db7d2","Type":"ContainerStarted","Data":"ad51351d176333ccd6061347e0d0ec06255d36afda6dba6f514ad1afb69328a4"} Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.434409 4611 generic.go:334] "Generic (PLEG): container finished" podID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerID="125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94" exitCode=0 Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.434545 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerDied","Data":"125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94"} Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.434618 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ffbec93d-02b4-4ac6-9f15-7267d9913543","Type":"ContainerDied","Data":"da6191a2bbeb35d1d391c248063083a95a02db7ad3be6f321040c27fd63ee132"} Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.434672 4611 scope.go:117] "RemoveContainer" containerID="2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.434705 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.478575 4611 scope.go:117] "RemoveContainer" containerID="9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.492607 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.506692 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.514474 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.515006 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-central-agent" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515031 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-central-agent" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.515062 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="proxy-httpd" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515071 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="proxy-httpd" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.515086 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-notification-agent" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515095 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-notification-agent" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.515108 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="sg-core" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515117 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="sg-core" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515327 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="proxy-httpd" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515349 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-central-agent" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515364 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="ceilometer-notification-agent" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.515378 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" containerName="sg-core" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.519121 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.523117 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.523440 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.523612 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.527700 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.577026 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-log-httpd\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.577080 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-run-httpd\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.577110 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.578373 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.578449 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcj7g\" (UniqueName: \"kubernetes.io/projected/e804f089-d35a-462b-80f0-562ede9dcffb-kube-api-access-gcj7g\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.578589 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-scripts\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.578666 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-config-data\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.578689 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.608456 4611 scope.go:117] "RemoveContainer" containerID="125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.633795 4611 scope.go:117] "RemoveContainer" containerID="e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.659824 4611 scope.go:117] "RemoveContainer" containerID="2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.660425 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9\": container with ID starting with 2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9 not found: ID does not exist" containerID="2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.660472 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9"} err="failed to get container status \"2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9\": rpc error: code = NotFound desc = could not find container \"2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9\": container with ID starting with 2aaa83ce5366361795c458bdc13e370070f48042d6aa18be4b76dd87a869bcb9 not found: ID does not exist" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.660498 4611 scope.go:117] "RemoveContainer" containerID="9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.660957 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6\": container with ID starting with 9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6 not found: ID does not exist" containerID="9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.660986 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6"} err="failed to get container status \"9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6\": rpc error: code = NotFound desc = could not find container \"9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6\": container with ID starting with 9800210c2dbc095ebef13cd7510411b7c03b5c0a693a24149855600e95c3dbf6 not found: ID does not exist" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.661006 4611 scope.go:117] "RemoveContainer" containerID="125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.662846 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94\": container with ID starting with 125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94 not 
found: ID does not exist" containerID="125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.662903 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94"} err="failed to get container status \"125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94\": rpc error: code = NotFound desc = could not find container \"125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94\": container with ID starting with 125278c5ba418dc7a66be26f16ffeb8b1281369ebd6748a3b0764f03a0956d94 not found: ID does not exist" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.662940 4611 scope.go:117] "RemoveContainer" containerID="e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b" Sep 29 13:02:18 crc kubenswrapper[4611]: E0929 13:02:18.663971 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b\": container with ID starting with e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b not found: ID does not exist" containerID="e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.664026 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b"} err="failed to get container status \"e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b\": rpc error: code = NotFound desc = could not find container \"e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b\": container with ID starting with e563f26e22d57cbc4c3c84fa8b7898cd60d75646e63a25c4e45b8a276238302b not found: ID does not exist" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.681585 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-scripts\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.681893 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-config-data\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.682038 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.682166 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-log-httpd\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.682273 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-run-httpd\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.683203 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.683407 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.683494 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcj7g\" (UniqueName: \"kubernetes.io/projected/e804f089-d35a-462b-80f0-562ede9dcffb-kube-api-access-gcj7g\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.683160 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-run-httpd\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.682942 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-log-httpd\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.686456 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-scripts\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.687771 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.687874 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-config-data\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.690468 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.690799 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.705033 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcj7g\" (UniqueName: \"kubernetes.io/projected/e804f089-d35a-462b-80f0-562ede9dcffb-kube-api-access-gcj7g\") pod \"ceilometer-0\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " pod="openstack/ceilometer-0" Sep 29 13:02:18 crc kubenswrapper[4611]: I0929 13:02:18.912573 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.420937 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.464776 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerStarted","Data":"3e427212c442c22a93f50304ff4e1beb0d89e9382d28a19ff1c326d94f244022"} Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.490165 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae643ac0-acf8-4fe8-add7-ed326a6db7d2","Type":"ContainerStarted","Data":"0e23dafcdfcc5cc02d09e3a45ca948a2af805d1f69d1fbc92039eced0777126b"} Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.515449 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.515429458 podStartE2EDuration="3.515429458s" podCreationTimestamp="2025-09-29 13:02:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:19.51033277 +0000 UTC m=+1326.401852376" watchObservedRunningTime="2025-09-29 13:02:19.515429458 +0000 UTC m=+1326.406949064" Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.704154 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.746956 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffbec93d-02b4-4ac6-9f15-7267d9913543" path="/var/lib/kubelet/pods/ffbec93d-02b4-4ac6-9f15-7267d9913543/volumes" Sep 29 13:02:19 crc kubenswrapper[4611]: I0929 13:02:19.747758 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 13:02:20 crc kubenswrapper[4611]: I0929 13:02:20.193587 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 13:02:20 crc kubenswrapper[4611]: I0929 13:02:20.193905 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 13:02:20 crc kubenswrapper[4611]: I0929 13:02:20.515330 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerStarted","Data":"f4e410815c8ea1289b0e64473a3a332acbfe90368b0d2590fd975912246d57c4"} Sep 29 13:02:20 crc kubenswrapper[4611]: I0929 13:02:20.515370 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerStarted","Data":"a24c29159bd25e490e80fd3d79e95da7352285f60ab3bf33f9de7a440ae0d22b"} Sep 29 
13:02:20 crc kubenswrapper[4611]: I0929 13:02:20.566515 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 13:02:21 crc kubenswrapper[4611]: I0929 13:02:21.284323 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:02:21 crc kubenswrapper[4611]: I0929 13:02:21.284519 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:02:21 crc kubenswrapper[4611]: I0929 13:02:21.551102 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerStarted","Data":"15d8f1fa6a7cd836c02cd421c9b085a365231b6b6fd809ba182611efe580b1a3"} Sep 29 13:02:21 crc kubenswrapper[4611]: I0929 13:02:21.840437 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:02:21 crc kubenswrapper[4611]: I0929 13:02:21.840817 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:02:21 crc kubenswrapper[4611]: I0929 13:02:21.858519 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 13:02:22 crc kubenswrapper[4611]: I0929 13:02:22.564061 4611 generic.go:334] "Generic (PLEG): container finished" podID="a7ea831d-4481-47e7-a96b-f3cee6f1d00c" containerID="50120a6644c4bbfbbd2b699e457f564c98197c6cfa7ffb3e367585d239e3f6e9" exitCode=0 Sep 29 13:02:22 crc kubenswrapper[4611]: I0929 13:02:22.564122 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nlhnf" event={"ID":"a7ea831d-4481-47e7-a96b-f3cee6f1d00c","Type":"ContainerDied","Data":"50120a6644c4bbfbbd2b699e457f564c98197c6cfa7ffb3e367585d239e3f6e9"} Sep 29 13:02:23 crc kubenswrapper[4611]: I0929 13:02:23.577369 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerStarted","Data":"41358523d62ae7373c4120e7334d7affae2bbe62ea6ca3a833cd4e321eb7ca7e"} Sep 29 13:02:23 crc kubenswrapper[4611]: I0929 13:02:23.612121 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.911678426 podStartE2EDuration="5.612099736s" podCreationTimestamp="2025-09-29 13:02:18 +0000 UTC" firstStartedPulling="2025-09-29 13:02:19.422564342 +0000 UTC m=+1326.314083948" lastFinishedPulling="2025-09-29 13:02:23.122985652 +0000 UTC m=+1330.014505258" observedRunningTime="2025-09-29 13:02:23.61015144 +0000 UTC m=+1330.501671056" watchObservedRunningTime="2025-09-29 13:02:23.612099736 +0000 UTC m=+1330.503619352" Sep 29 13:02:23 crc kubenswrapper[4611]: I0929 13:02:23.946420 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nlhnf" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.103316 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-config-data\") pod \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.103453 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-combined-ca-bundle\") pod \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.103489 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nqfr\" (UniqueName: \"kubernetes.io/projected/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-kube-api-access-8nqfr\") pod \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.103641 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-scripts\") pod \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\" (UID: \"a7ea831d-4481-47e7-a96b-f3cee6f1d00c\") " Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.111764 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-scripts" (OuterVolumeSpecName: "scripts") pod "a7ea831d-4481-47e7-a96b-f3cee6f1d00c" (UID: "a7ea831d-4481-47e7-a96b-f3cee6f1d00c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.111785 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-kube-api-access-8nqfr" (OuterVolumeSpecName: "kube-api-access-8nqfr") pod "a7ea831d-4481-47e7-a96b-f3cee6f1d00c" (UID: "a7ea831d-4481-47e7-a96b-f3cee6f1d00c"). InnerVolumeSpecName "kube-api-access-8nqfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.141279 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-config-data" (OuterVolumeSpecName: "config-data") pod "a7ea831d-4481-47e7-a96b-f3cee6f1d00c" (UID: "a7ea831d-4481-47e7-a96b-f3cee6f1d00c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.145796 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7ea831d-4481-47e7-a96b-f3cee6f1d00c" (UID: "a7ea831d-4481-47e7-a96b-f3cee6f1d00c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.205947 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.205987 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.206001 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nqfr\" (UniqueName: \"kubernetes.io/projected/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-kube-api-access-8nqfr\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.206013 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7ea831d-4481-47e7-a96b-f3cee6f1d00c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.588178 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nlhnf" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.588166 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nlhnf" event={"ID":"a7ea831d-4481-47e7-a96b-f3cee6f1d00c","Type":"ContainerDied","Data":"00f4f7bd6f2366d438c4c0a6036369cc0b64d759f49bd9aba79f00006095b693"} Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.588222 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00f4f7bd6f2366d438c4c0a6036369cc0b64d759f49bd9aba79f00006095b693" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.588334 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.774926 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.775525 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-log" containerID="cri-o://ca81bbc06d3005290aa476272a474d130aa0e39c1b583d53b7dd7d1680669400" gracePeriod=30 Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.775664 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-api" containerID="cri-o://18152e06beec7f0e17c71a62b98a91902c754428c84db34ca5b590634ec19267" gracePeriod=30 Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.794770 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.795007 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" containerName="nova-scheduler-scheduler" containerID="cri-o://2069b70d45403179684441ab6164c166e9f8c680d13da9871c613b079ca9e897" gracePeriod=30 Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.864404 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 
13:02:24.864852 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-log" containerID="cri-o://36d853bf30b4296e194c1046707f703715295919637e23e0255f00542a19c72e" gracePeriod=30 Sep 29 13:02:24 crc kubenswrapper[4611]: I0929 13:02:24.864880 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-metadata" containerID="cri-o://0e23dafcdfcc5cc02d09e3a45ca948a2af805d1f69d1fbc92039eced0777126b" gracePeriod=30 Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.605111 4611 generic.go:334] "Generic (PLEG): container finished" podID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerID="0e23dafcdfcc5cc02d09e3a45ca948a2af805d1f69d1fbc92039eced0777126b" exitCode=0 Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.605141 4611 generic.go:334] "Generic (PLEG): container finished" podID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerID="36d853bf30b4296e194c1046707f703715295919637e23e0255f00542a19c72e" exitCode=143 Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.605223 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae643ac0-acf8-4fe8-add7-ed326a6db7d2","Type":"ContainerDied","Data":"0e23dafcdfcc5cc02d09e3a45ca948a2af805d1f69d1fbc92039eced0777126b"} Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.605261 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae643ac0-acf8-4fe8-add7-ed326a6db7d2","Type":"ContainerDied","Data":"36d853bf30b4296e194c1046707f703715295919637e23e0255f00542a19c72e"} Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.612044 4611 generic.go:334] "Generic (PLEG): container finished" podID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerID="ca81bbc06d3005290aa476272a474d130aa0e39c1b583d53b7dd7d1680669400" exitCode=143 Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.613066 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"24e289b1-fa23-47f5-bd9b-c873abc229e6","Type":"ContainerDied","Data":"ca81bbc06d3005290aa476272a474d130aa0e39c1b583d53b7dd7d1680669400"} Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.798911 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.948962 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-config-data\") pod \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.949423 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss7bq\" (UniqueName: \"kubernetes.io/projected/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-kube-api-access-ss7bq\") pod \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.949644 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-nova-metadata-tls-certs\") pod \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.949913 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-combined-ca-bundle\") pod \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.950027 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-logs\") pod \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\" (UID: \"ae643ac0-acf8-4fe8-add7-ed326a6db7d2\") " Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.950239 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-logs" (OuterVolumeSpecName: "logs") pod "ae643ac0-acf8-4fe8-add7-ed326a6db7d2" (UID: "ae643ac0-acf8-4fe8-add7-ed326a6db7d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.950816 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.967587 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-kube-api-access-ss7bq" (OuterVolumeSpecName: "kube-api-access-ss7bq") pod "ae643ac0-acf8-4fe8-add7-ed326a6db7d2" (UID: "ae643ac0-acf8-4fe8-add7-ed326a6db7d2"). InnerVolumeSpecName "kube-api-access-ss7bq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:25 crc kubenswrapper[4611]: I0929 13:02:25.993804 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-config-data" (OuterVolumeSpecName: "config-data") pod "ae643ac0-acf8-4fe8-add7-ed326a6db7d2" (UID: "ae643ac0-acf8-4fe8-add7-ed326a6db7d2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.008289 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae643ac0-acf8-4fe8-add7-ed326a6db7d2" (UID: "ae643ac0-acf8-4fe8-add7-ed326a6db7d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.052906 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.052940 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss7bq\" (UniqueName: \"kubernetes.io/projected/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-kube-api-access-ss7bq\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.052953 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.057696 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ae643ac0-acf8-4fe8-add7-ed326a6db7d2" (UID: "ae643ac0-acf8-4fe8-add7-ed326a6db7d2"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.154281 4611 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae643ac0-acf8-4fe8-add7-ed326a6db7d2-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.635651 4611 generic.go:334] "Generic (PLEG): container finished" podID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerID="18152e06beec7f0e17c71a62b98a91902c754428c84db34ca5b590634ec19267" exitCode=0 Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.636045 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"24e289b1-fa23-47f5-bd9b-c873abc229e6","Type":"ContainerDied","Data":"18152e06beec7f0e17c71a62b98a91902c754428c84db34ca5b590634ec19267"} Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.638364 4611 generic.go:334] "Generic (PLEG): container finished" podID="c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" containerID="2069b70d45403179684441ab6164c166e9f8c680d13da9871c613b079ca9e897" exitCode=0 Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.638423 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7","Type":"ContainerDied","Data":"2069b70d45403179684441ab6164c166e9f8c680d13da9871c613b079ca9e897"} Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.644324 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae643ac0-acf8-4fe8-add7-ed326a6db7d2","Type":"ContainerDied","Data":"ad51351d176333ccd6061347e0d0ec06255d36afda6dba6f514ad1afb69328a4"} Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.644371 4611 scope.go:117] 
"RemoveContainer" containerID="0e23dafcdfcc5cc02d09e3a45ca948a2af805d1f69d1fbc92039eced0777126b" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.644526 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.701467 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.722144 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.747369 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:26 crc kubenswrapper[4611]: E0929 13:02:26.748043 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-metadata" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.748351 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-metadata" Sep 29 13:02:26 crc kubenswrapper[4611]: E0929 13:02:26.748426 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-log" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.748476 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-log" Sep 29 13:02:26 crc kubenswrapper[4611]: E0929 13:02:26.748539 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7ea831d-4481-47e7-a96b-f3cee6f1d00c" containerName="nova-manage" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.748601 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7ea831d-4481-47e7-a96b-f3cee6f1d00c" containerName="nova-manage" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.748868 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7ea831d-4481-47e7-a96b-f3cee6f1d00c" containerName="nova-manage" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.748936 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-metadata" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.749005 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" containerName="nova-metadata-log" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.750199 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.754055 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.754254 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.763376 4611 scope.go:117] "RemoveContainer" containerID="36d853bf30b4296e194c1046707f703715295919637e23e0255f00542a19c72e" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.777694 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.870792 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.871117 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.871174 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-config-data\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.871204 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxdth\" (UniqueName: \"kubernetes.io/projected/5df993fe-7440-4ec0-97c7-498f4a999ad2-kube-api-access-jxdth\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.871287 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df993fe-7440-4ec0-97c7-498f4a999ad2-logs\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.974457 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df993fe-7440-4ec0-97c7-498f4a999ad2-logs\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.974541 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.974680 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.974720 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-config-data\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.974752 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxdth\" (UniqueName: \"kubernetes.io/projected/5df993fe-7440-4ec0-97c7-498f4a999ad2-kube-api-access-jxdth\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.975577 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df993fe-7440-4ec0-97c7-498f4a999ad2-logs\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.984118 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.985702 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:26 crc kubenswrapper[4611]: I0929 13:02:26.986444 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-config-data\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.004219 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxdth\" (UniqueName: \"kubernetes.io/projected/5df993fe-7440-4ec0-97c7-498f4a999ad2-kube-api-access-jxdth\") pod \"nova-metadata-0\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") " pod="openstack/nova-metadata-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.094710 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.096405 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.106532 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183091 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-combined-ca-bundle\") pod \"24e289b1-fa23-47f5-bd9b-c873abc229e6\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183327 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6tc8\" (UniqueName: \"kubernetes.io/projected/24e289b1-fa23-47f5-bd9b-c873abc229e6-kube-api-access-w6tc8\") pod \"24e289b1-fa23-47f5-bd9b-c873abc229e6\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183385 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq7vm\" (UniqueName: \"kubernetes.io/projected/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-kube-api-access-fq7vm\") pod \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183443 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e289b1-fa23-47f5-bd9b-c873abc229e6-logs\") pod \"24e289b1-fa23-47f5-bd9b-c873abc229e6\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183505 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-combined-ca-bundle\") pod \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183575 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data\") pod \"24e289b1-fa23-47f5-bd9b-c873abc229e6\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.183610 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-config-data\") pod \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\" (UID: \"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7\") " Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.186647 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24e289b1-fa23-47f5-bd9b-c873abc229e6-logs" (OuterVolumeSpecName: "logs") pod "24e289b1-fa23-47f5-bd9b-c873abc229e6" (UID: "24e289b1-fa23-47f5-bd9b-c873abc229e6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.194148 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e289b1-fa23-47f5-bd9b-c873abc229e6-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.198064 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-kube-api-access-fq7vm" (OuterVolumeSpecName: "kube-api-access-fq7vm") pod "c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" (UID: "c814bc12-1c7b-4d7b-a477-dd4407d3b1b7"). InnerVolumeSpecName "kube-api-access-fq7vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.200194 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24e289b1-fa23-47f5-bd9b-c873abc229e6-kube-api-access-w6tc8" (OuterVolumeSpecName: "kube-api-access-w6tc8") pod "24e289b1-fa23-47f5-bd9b-c873abc229e6" (UID: "24e289b1-fa23-47f5-bd9b-c873abc229e6"). InnerVolumeSpecName "kube-api-access-w6tc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.234332 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-config-data" (OuterVolumeSpecName: "config-data") pod "c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" (UID: "c814bc12-1c7b-4d7b-a477-dd4407d3b1b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.301988 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data" (OuterVolumeSpecName: "config-data") pod "24e289b1-fa23-47f5-bd9b-c873abc229e6" (UID: "24e289b1-fa23-47f5-bd9b-c873abc229e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.303784 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data\") pod \"24e289b1-fa23-47f5-bd9b-c873abc229e6\" (UID: \"24e289b1-fa23-47f5-bd9b-c873abc229e6\") " Sep 29 13:02:27 crc kubenswrapper[4611]: W0929 13:02:27.304063 4611 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/24e289b1-fa23-47f5-bd9b-c873abc229e6/volumes/kubernetes.io~secret/config-data Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.304085 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data" (OuterVolumeSpecName: "config-data") pod "24e289b1-fa23-47f5-bd9b-c873abc229e6" (UID: "24e289b1-fa23-47f5-bd9b-c873abc229e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.304567 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq7vm\" (UniqueName: \"kubernetes.io/projected/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-kube-api-access-fq7vm\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.304595 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.304608 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.304620 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6tc8\" (UniqueName: \"kubernetes.io/projected/24e289b1-fa23-47f5-bd9b-c873abc229e6-kube-api-access-w6tc8\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.312809 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "24e289b1-fa23-47f5-bd9b-c873abc229e6" (UID: "24e289b1-fa23-47f5-bd9b-c873abc229e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.316810 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" (UID: "c814bc12-1c7b-4d7b-a477-dd4407d3b1b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.406860 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.407351 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e289b1-fa23-47f5-bd9b-c873abc229e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.655916 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"24e289b1-fa23-47f5-bd9b-c873abc229e6","Type":"ContainerDied","Data":"a9296d2ec3232070ec778f93cb6abcd4f563e2e633f9432a65754a68d4ae69d2"} Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.655977 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.655992 4611 scope.go:117] "RemoveContainer" containerID="18152e06beec7f0e17c71a62b98a91902c754428c84db34ca5b590634ec19267" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.657974 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c814bc12-1c7b-4d7b-a477-dd4407d3b1b7","Type":"ContainerDied","Data":"35c7ace4e87535224c59a9eb84d9f4257f78cbfe84afaad56ea0c189f4117dff"} Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.657990 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.685437 4611 scope.go:117] "RemoveContainer" containerID="ca81bbc06d3005290aa476272a474d130aa0e39c1b583d53b7dd7d1680669400" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.710228 4611 scope.go:117] "RemoveContainer" containerID="2069b70d45403179684441ab6164c166e9f8c680d13da9871c613b079ca9e897" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.715060 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.734693 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.788478 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" path="/var/lib/kubelet/pods/24e289b1-fa23-47f5-bd9b-c873abc229e6/volumes" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.789293 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae643ac0-acf8-4fe8-add7-ed326a6db7d2" path="/var/lib/kubelet/pods/ae643ac0-acf8-4fe8-add7-ed326a6db7d2/volumes" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.789969 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.790066 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.807998 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.821723 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: E0929 13:02:27.822212 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-api" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.822238 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-api" Sep 29 13:02:27 crc kubenswrapper[4611]: E0929 13:02:27.822262 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" containerName="nova-scheduler-scheduler" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.822270 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" containerName="nova-scheduler-scheduler" Sep 29 13:02:27 crc kubenswrapper[4611]: E0929 13:02:27.822284 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-log" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.822293 4611 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-log" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.822637 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-log" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.822656 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="24e289b1-fa23-47f5-bd9b-c873abc229e6" containerName="nova-api-api" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.822671 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" containerName="nova-scheduler-scheduler" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.823917 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.833842 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.884080 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.907054 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.918673 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fvbb\" (UniqueName: \"kubernetes.io/projected/9d246b21-168b-419e-bd2d-dbc1a16db427-kube-api-access-5fvbb\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.918912 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d246b21-168b-419e-bd2d-dbc1a16db427-logs\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.918987 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.919116 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-config-data\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.925682 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.936038 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 13:02:27 crc kubenswrapper[4611]: I0929 13:02:27.958808 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021694 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021781 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fvbb\" (UniqueName: \"kubernetes.io/projected/9d246b21-168b-419e-bd2d-dbc1a16db427-kube-api-access-5fvbb\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021816 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d246b21-168b-419e-bd2d-dbc1a16db427-logs\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021835 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021856 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gzzx\" (UniqueName: \"kubernetes.io/projected/252195d0-8a55-4479-b2f6-3248f52fea78-kube-api-access-6gzzx\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021900 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-config-data\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.021940 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-config-data\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.025840 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d246b21-168b-419e-bd2d-dbc1a16db427-logs\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.038313 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-config-data\") pod \"nova-api-0\" (UID: 
\"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.059417 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.124847 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.125029 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gzzx\" (UniqueName: \"kubernetes.io/projected/252195d0-8a55-4479-b2f6-3248f52fea78-kube-api-access-6gzzx\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.125099 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-config-data\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.139299 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fvbb\" (UniqueName: \"kubernetes.io/projected/9d246b21-168b-419e-bd2d-dbc1a16db427-kube-api-access-5fvbb\") pod \"nova-api-0\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.143501 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-config-data\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.148468 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.172469 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gzzx\" (UniqueName: \"kubernetes.io/projected/252195d0-8a55-4479-b2f6-3248f52fea78-kube-api-access-6gzzx\") pod \"nova-scheduler-0\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.245573 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.275230 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.694318 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5df993fe-7440-4ec0-97c7-498f4a999ad2","Type":"ContainerStarted","Data":"c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82"} Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.694386 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5df993fe-7440-4ec0-97c7-498f4a999ad2","Type":"ContainerStarted","Data":"8b0671cd302a792ed0e4eeb908d8064b40757cbab45efbab86f574e2a4790e18"} Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.823667 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:28 crc kubenswrapper[4611]: I0929 13:02:28.936535 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.706433 4611 generic.go:334] "Generic (PLEG): container finished" podID="30289ed8-4668-459a-9e89-698bea27c2f0" containerID="3b50f3e4a3e20ff1a194c72479ba28e67f37ed5ae96e7f2a964ba9e8ef5e2643" exitCode=0 Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.706501 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fdss8" event={"ID":"30289ed8-4668-459a-9e89-698bea27c2f0","Type":"ContainerDied","Data":"3b50f3e4a3e20ff1a194c72479ba28e67f37ed5ae96e7f2a964ba9e8ef5e2643"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.710389 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d246b21-168b-419e-bd2d-dbc1a16db427","Type":"ContainerStarted","Data":"1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.710510 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d246b21-168b-419e-bd2d-dbc1a16db427","Type":"ContainerStarted","Data":"b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.710581 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d246b21-168b-419e-bd2d-dbc1a16db427","Type":"ContainerStarted","Data":"deddf0cc32ceead3c0a8072b083c9874581e7b3d9d709c97c49dac26a58866c1"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.713458 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5df993fe-7440-4ec0-97c7-498f4a999ad2","Type":"ContainerStarted","Data":"d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.715480 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"252195d0-8a55-4479-b2f6-3248f52fea78","Type":"ContainerStarted","Data":"51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.715519 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"252195d0-8a55-4479-b2f6-3248f52fea78","Type":"ContainerStarted","Data":"d16dd60a374946accc40ac83ddd5f4097ae8b9c77a725d6e1636a9140b6d0e72"} Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.742903 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.742878677 
podStartE2EDuration="2.742878677s" podCreationTimestamp="2025-09-29 13:02:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:29.741934759 +0000 UTC m=+1336.633454375" watchObservedRunningTime="2025-09-29 13:02:29.742878677 +0000 UTC m=+1336.634398283" Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.779731 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c814bc12-1c7b-4d7b-a477-dd4407d3b1b7" path="/var/lib/kubelet/pods/c814bc12-1c7b-4d7b-a477-dd4407d3b1b7/volumes" Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.799707 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.799685109 podStartE2EDuration="3.799685109s" podCreationTimestamp="2025-09-29 13:02:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:29.769338882 +0000 UTC m=+1336.660858488" watchObservedRunningTime="2025-09-29 13:02:29.799685109 +0000 UTC m=+1336.691204715" Sep 29 13:02:29 crc kubenswrapper[4611]: I0929 13:02:29.823721 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.823703354 podStartE2EDuration="2.823703354s" podCreationTimestamp="2025-09-29 13:02:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:29.791598605 +0000 UTC m=+1336.683118221" watchObservedRunningTime="2025-09-29 13:02:29.823703354 +0000 UTC m=+1336.715222960" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.055899 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.192420 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-975l2\" (UniqueName: \"kubernetes.io/projected/30289ed8-4668-459a-9e89-698bea27c2f0-kube-api-access-975l2\") pod \"30289ed8-4668-459a-9e89-698bea27c2f0\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.192568 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-scripts\") pod \"30289ed8-4668-459a-9e89-698bea27c2f0\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.192594 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-combined-ca-bundle\") pod \"30289ed8-4668-459a-9e89-698bea27c2f0\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.192701 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-config-data\") pod \"30289ed8-4668-459a-9e89-698bea27c2f0\" (UID: \"30289ed8-4668-459a-9e89-698bea27c2f0\") " Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.204908 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30289ed8-4668-459a-9e89-698bea27c2f0-kube-api-access-975l2" (OuterVolumeSpecName: "kube-api-access-975l2") pod "30289ed8-4668-459a-9e89-698bea27c2f0" (UID: "30289ed8-4668-459a-9e89-698bea27c2f0"). InnerVolumeSpecName "kube-api-access-975l2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.206340 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-scripts" (OuterVolumeSpecName: "scripts") pod "30289ed8-4668-459a-9e89-698bea27c2f0" (UID: "30289ed8-4668-459a-9e89-698bea27c2f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.225350 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30289ed8-4668-459a-9e89-698bea27c2f0" (UID: "30289ed8-4668-459a-9e89-698bea27c2f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.228907 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-config-data" (OuterVolumeSpecName: "config-data") pod "30289ed8-4668-459a-9e89-698bea27c2f0" (UID: "30289ed8-4668-459a-9e89-698bea27c2f0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.295216 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.295249 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.295258 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30289ed8-4668-459a-9e89-698bea27c2f0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.295267 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-975l2\" (UniqueName: \"kubernetes.io/projected/30289ed8-4668-459a-9e89-698bea27c2f0-kube-api-access-975l2\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.733878 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fdss8" event={"ID":"30289ed8-4668-459a-9e89-698bea27c2f0","Type":"ContainerDied","Data":"03b45725e014e42e8703bae579d7b1db2d96ff598f42f86916ca0457fd949a1a"} Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.733930 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03b45725e014e42e8703bae579d7b1db2d96ff598f42f86916ca0457fd949a1a" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.733994 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fdss8" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.813812 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 13:02:31 crc kubenswrapper[4611]: E0929 13:02:31.814254 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30289ed8-4668-459a-9e89-698bea27c2f0" containerName="nova-cell1-conductor-db-sync" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.814270 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="30289ed8-4668-459a-9e89-698bea27c2f0" containerName="nova-cell1-conductor-db-sync" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.814426 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="30289ed8-4668-459a-9e89-698bea27c2f0" containerName="nova-cell1-conductor-db-sync" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.815124 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.825570 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.829972 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.905084 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.905219 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn6sc\" (UniqueName: \"kubernetes.io/projected/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-kube-api-access-nn6sc\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:31 crc kubenswrapper[4611]: I0929 13:02:31.905267 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.007584 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn6sc\" (UniqueName: \"kubernetes.io/projected/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-kube-api-access-nn6sc\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.007694 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.007836 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.018396 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.018492 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.028144 4611 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn6sc\" (UniqueName: \"kubernetes.io/projected/c05ef82a-7994-4d5a-b8e3-c04701bea7fe-kube-api-access-nn6sc\") pod \"nova-cell1-conductor-0\" (UID: \"c05ef82a-7994-4d5a-b8e3-c04701bea7fe\") " pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.095558 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.095677 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.144176 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.592574 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 13:02:32 crc kubenswrapper[4611]: W0929 13:02:32.599688 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc05ef82a_7994_4d5a_b8e3_c04701bea7fe.slice/crio-9cc5f87fa14cd847fb5b6760a278c82eaaa7cf92836272c2b96f2d4266f752d1 WatchSource:0}: Error finding container 9cc5f87fa14cd847fb5b6760a278c82eaaa7cf92836272c2b96f2d4266f752d1: Status 404 returned error can't find the container with id 9cc5f87fa14cd847fb5b6760a278c82eaaa7cf92836272c2b96f2d4266f752d1 Sep 29 13:02:32 crc kubenswrapper[4611]: I0929 13:02:32.746917 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c05ef82a-7994-4d5a-b8e3-c04701bea7fe","Type":"ContainerStarted","Data":"9cc5f87fa14cd847fb5b6760a278c82eaaa7cf92836272c2b96f2d4266f752d1"} Sep 29 13:02:33 crc kubenswrapper[4611]: I0929 13:02:33.275792 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 13:02:33 crc kubenswrapper[4611]: I0929 13:02:33.759102 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c05ef82a-7994-4d5a-b8e3-c04701bea7fe","Type":"ContainerStarted","Data":"c3455ca6fdbec63640fb80a4b70aea7e11b2474dd96343ccf5945c90ecdaadb5"} Sep 29 13:02:33 crc kubenswrapper[4611]: I0929 13:02:33.760096 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:33 crc kubenswrapper[4611]: I0929 13:02:33.793579 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.793560475 podStartE2EDuration="2.793560475s" podCreationTimestamp="2025-09-29 13:02:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:33.782593068 +0000 UTC m=+1340.674112684" watchObservedRunningTime="2025-09-29 13:02:33.793560475 +0000 UTC m=+1340.685080081" Sep 29 13:02:37 crc kubenswrapper[4611]: I0929 13:02:37.095389 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 13:02:37 crc kubenswrapper[4611]: I0929 13:02:37.096015 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 13:02:37 crc kubenswrapper[4611]: I0929 13:02:37.171285 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-cell1-conductor-0" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.104845 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.104892 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.245825 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.245888 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.275985 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.310835 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 13:02:38 crc kubenswrapper[4611]: I0929 13:02:38.840213 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 13:02:39 crc kubenswrapper[4611]: I0929 13:02:39.328852 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:02:39 crc kubenswrapper[4611]: I0929 13:02:39.328852 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.799242 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.856932 4611 generic.go:334] "Generic (PLEG): container finished" podID="3a34b746-a2fc-403d-9104-58aef93a7154" containerID="631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1" exitCode=137 Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.856988 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3a34b746-a2fc-403d-9104-58aef93a7154","Type":"ContainerDied","Data":"631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1"} Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.857022 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3a34b746-a2fc-403d-9104-58aef93a7154","Type":"ContainerDied","Data":"05e71df061c1b1f31df631eff82a33fcb9ef87cfcef48677b2924888cbc9e3f3"} Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.857046 4611 scope.go:117] "RemoveContainer" containerID="631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.857218 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.886908 4611 scope.go:117] "RemoveContainer" containerID="631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1" Sep 29 13:02:43 crc kubenswrapper[4611]: E0929 13:02:43.887879 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1\": container with ID starting with 631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1 not found: ID does not exist" containerID="631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.887941 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1"} err="failed to get container status \"631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1\": rpc error: code = NotFound desc = could not find container \"631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1\": container with ID starting with 631433950bf73af38d495b5bca92060528c10d5b33ac2d0c77518dc80cdadec1 not found: ID does not exist" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.938812 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-config-data\") pod \"3a34b746-a2fc-403d-9104-58aef93a7154\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.938921 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2p8s\" (UniqueName: \"kubernetes.io/projected/3a34b746-a2fc-403d-9104-58aef93a7154-kube-api-access-m2p8s\") pod \"3a34b746-a2fc-403d-9104-58aef93a7154\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.939141 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-combined-ca-bundle\") pod 
\"3a34b746-a2fc-403d-9104-58aef93a7154\" (UID: \"3a34b746-a2fc-403d-9104-58aef93a7154\") " Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.961799 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a34b746-a2fc-403d-9104-58aef93a7154-kube-api-access-m2p8s" (OuterVolumeSpecName: "kube-api-access-m2p8s") pod "3a34b746-a2fc-403d-9104-58aef93a7154" (UID: "3a34b746-a2fc-403d-9104-58aef93a7154"). InnerVolumeSpecName "kube-api-access-m2p8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.983866 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a34b746-a2fc-403d-9104-58aef93a7154" (UID: "3a34b746-a2fc-403d-9104-58aef93a7154"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:43 crc kubenswrapper[4611]: I0929 13:02:43.988848 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-config-data" (OuterVolumeSpecName: "config-data") pod "3a34b746-a2fc-403d-9104-58aef93a7154" (UID: "3a34b746-a2fc-403d-9104-58aef93a7154"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.041812 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.041888 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a34b746-a2fc-403d-9104-58aef93a7154-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.041928 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2p8s\" (UniqueName: \"kubernetes.io/projected/3a34b746-a2fc-403d-9104-58aef93a7154-kube-api-access-m2p8s\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.199775 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.212928 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.236608 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:44 crc kubenswrapper[4611]: E0929 13:02:44.237348 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a34b746-a2fc-403d-9104-58aef93a7154" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.237440 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a34b746-a2fc-403d-9104-58aef93a7154" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.237783 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a34b746-a2fc-403d-9104-58aef93a7154" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.238811 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.245121 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.246128 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.249441 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.291040 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.349109 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz98h\" (UniqueName: \"kubernetes.io/projected/b472baaa-35dd-4c0a-be69-991eb287a0f3-kube-api-access-xz98h\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.349260 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.349399 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.349429 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.349518 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.451026 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz98h\" (UniqueName: \"kubernetes.io/projected/b472baaa-35dd-4c0a-be69-991eb287a0f3-kube-api-access-xz98h\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.451102 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 
29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.451155 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.451179 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.451239 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.456097 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.457140 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.459671 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.461933 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b472baaa-35dd-4c0a-be69-991eb287a0f3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.474446 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz98h\" (UniqueName: \"kubernetes.io/projected/b472baaa-35dd-4c0a-be69-991eb287a0f3-kube-api-access-xz98h\") pod \"nova-cell1-novncproxy-0\" (UID: \"b472baaa-35dd-4c0a-be69-991eb287a0f3\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:44 crc kubenswrapper[4611]: I0929 13:02:44.564141 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:45 crc kubenswrapper[4611]: I0929 13:02:45.112644 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 13:02:45 crc kubenswrapper[4611]: W0929 13:02:45.120665 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb472baaa_35dd_4c0a_be69_991eb287a0f3.slice/crio-7e7b287c74433291607610e75837fefc438a79bab5757cc99334557f302957b8 WatchSource:0}: Error finding container 7e7b287c74433291607610e75837fefc438a79bab5757cc99334557f302957b8: Status 404 returned error can't find the container with id 7e7b287c74433291607610e75837fefc438a79bab5757cc99334557f302957b8 Sep 29 13:02:45 crc kubenswrapper[4611]: I0929 13:02:45.748492 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a34b746-a2fc-403d-9104-58aef93a7154" path="/var/lib/kubelet/pods/3a34b746-a2fc-403d-9104-58aef93a7154/volumes" Sep 29 13:02:45 crc kubenswrapper[4611]: I0929 13:02:45.879947 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b472baaa-35dd-4c0a-be69-991eb287a0f3","Type":"ContainerStarted","Data":"a62b450031dcf67ebb16ee9129f51a9b5073b2222b86f0957f5eec3cce56c803"} Sep 29 13:02:45 crc kubenswrapper[4611]: I0929 13:02:45.879991 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b472baaa-35dd-4c0a-be69-991eb287a0f3","Type":"ContainerStarted","Data":"7e7b287c74433291607610e75837fefc438a79bab5757cc99334557f302957b8"} Sep 29 13:02:45 crc kubenswrapper[4611]: I0929 13:02:45.901603 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.901582238 podStartE2EDuration="1.901582238s" podCreationTimestamp="2025-09-29 13:02:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:45.896017557 +0000 UTC m=+1352.787537163" watchObservedRunningTime="2025-09-29 13:02:45.901582238 +0000 UTC m=+1352.793101854" Sep 29 13:02:47 crc kubenswrapper[4611]: I0929 13:02:47.123393 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 13:02:47 crc kubenswrapper[4611]: I0929 13:02:47.123806 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 13:02:47 crc kubenswrapper[4611]: I0929 13:02:47.300742 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 13:02:47 crc kubenswrapper[4611]: I0929 13:02:47.321362 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 13:02:48 crc kubenswrapper[4611]: I0929 13:02:48.258478 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 13:02:48 crc kubenswrapper[4611]: I0929 13:02:48.260144 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 13:02:48 crc kubenswrapper[4611]: I0929 13:02:48.266322 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 13:02:48 crc kubenswrapper[4611]: I0929 13:02:48.329041 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 13:02:48 crc 
kubenswrapper[4611]: I0929 13:02:48.918510 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 13:02:48 crc kubenswrapper[4611]: I0929 13:02:48.929823 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 13:02:48 crc kubenswrapper[4611]: I0929 13:02:48.934101 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.175387 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d6dc8bf89-lpj5s"] Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.177173 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.183944 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d6dc8bf89-lpj5s"] Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.247517 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-config\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.247637 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-nb\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.247701 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcm5z\" (UniqueName: \"kubernetes.io/projected/fc465e99-037f-4fc4-acca-31b22fd061b5-kube-api-access-tcm5z\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.247747 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-svc\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.247788 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-sb\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.247925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-swift-storage-0\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.349743 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-config\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.349812 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-nb\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.349839 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcm5z\" (UniqueName: \"kubernetes.io/projected/fc465e99-037f-4fc4-acca-31b22fd061b5-kube-api-access-tcm5z\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.349865 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-svc\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.350672 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-svc\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.349888 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-sb\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.350805 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-swift-storage-0\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.350993 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-config\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.351045 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-sb\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.351353 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-swift-storage-0\") pod 
\"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.352749 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-nb\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.381389 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcm5z\" (UniqueName: \"kubernetes.io/projected/fc465e99-037f-4fc4-acca-31b22fd061b5-kube-api-access-tcm5z\") pod \"dnsmasq-dns-d6dc8bf89-lpj5s\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.511296 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:49 crc kubenswrapper[4611]: I0929 13:02:49.567743 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:50 crc kubenswrapper[4611]: I0929 13:02:50.145006 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d6dc8bf89-lpj5s"] Sep 29 13:02:50 crc kubenswrapper[4611]: I0929 13:02:50.951588 4611 generic.go:334] "Generic (PLEG): container finished" podID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerID="09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225" exitCode=0 Sep 29 13:02:50 crc kubenswrapper[4611]: I0929 13:02:50.952119 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" event={"ID":"fc465e99-037f-4fc4-acca-31b22fd061b5","Type":"ContainerDied","Data":"09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225"} Sep 29 13:02:50 crc kubenswrapper[4611]: I0929 13:02:50.952170 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" event={"ID":"fc465e99-037f-4fc4-acca-31b22fd061b5","Type":"ContainerStarted","Data":"88c299dd417dd5f4e031d454c515e02b1e8bfa7526473f144177a171e9b5aa5e"} Sep 29 13:02:51 crc kubenswrapper[4611]: I0929 13:02:51.979455 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" event={"ID":"fc465e99-037f-4fc4-acca-31b22fd061b5","Type":"ContainerStarted","Data":"b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5"} Sep 29 13:02:51 crc kubenswrapper[4611]: I0929 13:02:51.979853 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.010881 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" podStartSLOduration=3.010861397 podStartE2EDuration="3.010861397s" podCreationTimestamp="2025-09-29 13:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:52.008525339 +0000 UTC m=+1358.900044945" watchObservedRunningTime="2025-09-29 13:02:52.010861397 +0000 UTC m=+1358.902381003" Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.750919 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 
13:02:52.751600 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-central-agent" containerID="cri-o://a24c29159bd25e490e80fd3d79e95da7352285f60ab3bf33f9de7a440ae0d22b" gracePeriod=30 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.751703 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-notification-agent" containerID="cri-o://f4e410815c8ea1289b0e64473a3a332acbfe90368b0d2590fd975912246d57c4" gracePeriod=30 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.751697 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="sg-core" containerID="cri-o://15d8f1fa6a7cd836c02cd421c9b085a365231b6b6fd809ba182611efe580b1a3" gracePeriod=30 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.751769 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="proxy-httpd" containerID="cri-o://41358523d62ae7373c4120e7334d7affae2bbe62ea6ca3a833cd4e321eb7ca7e" gracePeriod=30 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.882461 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.897135 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-log" containerID="cri-o://b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6" gracePeriod=30 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.897250 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-api" containerID="cri-o://1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d" gracePeriod=30 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.991452 4611 generic.go:334] "Generic (PLEG): container finished" podID="e804f089-d35a-462b-80f0-562ede9dcffb" containerID="41358523d62ae7373c4120e7334d7affae2bbe62ea6ca3a833cd4e321eb7ca7e" exitCode=0 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.991486 4611 generic.go:334] "Generic (PLEG): container finished" podID="e804f089-d35a-462b-80f0-562ede9dcffb" containerID="15d8f1fa6a7cd836c02cd421c9b085a365231b6b6fd809ba182611efe580b1a3" exitCode=2 Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.991795 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerDied","Data":"41358523d62ae7373c4120e7334d7affae2bbe62ea6ca3a833cd4e321eb7ca7e"} Sep 29 13:02:52 crc kubenswrapper[4611]: I0929 13:02:52.991852 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerDied","Data":"15d8f1fa6a7cd836c02cd421c9b085a365231b6b6fd809ba182611efe580b1a3"} Sep 29 13:02:54 crc kubenswrapper[4611]: I0929 13:02:54.003858 4611 generic.go:334] "Generic (PLEG): container finished" podID="e804f089-d35a-462b-80f0-562ede9dcffb" containerID="a24c29159bd25e490e80fd3d79e95da7352285f60ab3bf33f9de7a440ae0d22b" exitCode=0 Sep 29 
13:02:54 crc kubenswrapper[4611]: I0929 13:02:54.004191 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerDied","Data":"a24c29159bd25e490e80fd3d79e95da7352285f60ab3bf33f9de7a440ae0d22b"} Sep 29 13:02:54 crc kubenswrapper[4611]: I0929 13:02:54.007251 4611 generic.go:334] "Generic (PLEG): container finished" podID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerID="b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6" exitCode=143 Sep 29 13:02:54 crc kubenswrapper[4611]: I0929 13:02:54.007279 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d246b21-168b-419e-bd2d-dbc1a16db427","Type":"ContainerDied","Data":"b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6"} Sep 29 13:02:54 crc kubenswrapper[4611]: I0929 13:02:54.564431 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:54 crc kubenswrapper[4611]: I0929 13:02:54.595497 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.041475 4611 generic.go:334] "Generic (PLEG): container finished" podID="e804f089-d35a-462b-80f0-562ede9dcffb" containerID="f4e410815c8ea1289b0e64473a3a332acbfe90368b0d2590fd975912246d57c4" exitCode=0 Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.043103 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerDied","Data":"f4e410815c8ea1289b0e64473a3a332acbfe90368b0d2590fd975912246d57c4"} Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.075800 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.238913 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.394846 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-config-data\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.395236 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-sg-core-conf-yaml\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.395336 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-run-httpd\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.395431 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-combined-ca-bundle\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.395572 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-log-httpd\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.395704 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcj7g\" (UniqueName: \"kubernetes.io/projected/e804f089-d35a-462b-80f0-562ede9dcffb-kube-api-access-gcj7g\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.396861 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-ceilometer-tls-certs\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.397016 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-scripts\") pod \"e804f089-d35a-462b-80f0-562ede9dcffb\" (UID: \"e804f089-d35a-462b-80f0-562ede9dcffb\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.396733 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.397140 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.421701 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rsn2k"] Sep 29 13:02:55 crc kubenswrapper[4611]: E0929 13:02:55.422155 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-central-agent" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422174 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-central-agent" Sep 29 13:02:55 crc kubenswrapper[4611]: E0929 13:02:55.422204 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="proxy-httpd" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422214 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="proxy-httpd" Sep 29 13:02:55 crc kubenswrapper[4611]: E0929 13:02:55.422240 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="sg-core" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422248 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="sg-core" Sep 29 13:02:55 crc kubenswrapper[4611]: E0929 13:02:55.422262 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-notification-agent" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422270 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-notification-agent" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422513 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="proxy-httpd" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422535 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-central-agent" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.422551 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="sg-core" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.423984 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" containerName="ceilometer-notification-agent" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.434978 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rsn2k"] Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.435142 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.437293 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e804f089-d35a-462b-80f0-562ede9dcffb-kube-api-access-gcj7g" (OuterVolumeSpecName: "kube-api-access-gcj7g") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "kube-api-access-gcj7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.443297 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.443494 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.449852 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.453374 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-scripts" (OuterVolumeSpecName: "scripts") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.498906 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw57s\" (UniqueName: \"kubernetes.io/projected/a2702802-11e4-4903-9943-fb74e6f7b756-kube-api-access-sw57s\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499327 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-config-data\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499390 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-scripts\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499429 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499607 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499660 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499678 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499692 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e804f089-d35a-462b-80f0-562ede9dcffb-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.499704 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcj7g\" (UniqueName: \"kubernetes.io/projected/e804f089-d35a-462b-80f0-562ede9dcffb-kube-api-access-gcj7g\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.563221 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-config-data" (OuterVolumeSpecName: "config-data") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.563251 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601458 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw57s\" (UniqueName: \"kubernetes.io/projected/a2702802-11e4-4903-9943-fb74e6f7b756-kube-api-access-sw57s\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601529 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-config-data\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601534 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e804f089-d35a-462b-80f0-562ede9dcffb" (UID: "e804f089-d35a-462b-80f0-562ede9dcffb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601565 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-scripts\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601651 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601716 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601732 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.601745 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e804f089-d35a-462b-80f0-562ede9dcffb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.607058 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.607778 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-config-data\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.611748 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-scripts\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.624984 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.631062 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw57s\" (UniqueName: \"kubernetes.io/projected/a2702802-11e4-4903-9943-fb74e6f7b756-kube-api-access-sw57s\") pod \"nova-cell1-cell-mapping-rsn2k\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.702547 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fvbb\" (UniqueName: \"kubernetes.io/projected/9d246b21-168b-419e-bd2d-dbc1a16db427-kube-api-access-5fvbb\") pod \"9d246b21-168b-419e-bd2d-dbc1a16db427\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.702736 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-combined-ca-bundle\") pod \"9d246b21-168b-419e-bd2d-dbc1a16db427\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.702790 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d246b21-168b-419e-bd2d-dbc1a16db427-logs\") pod \"9d246b21-168b-419e-bd2d-dbc1a16db427\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.702852 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-config-data\") pod \"9d246b21-168b-419e-bd2d-dbc1a16db427\" (UID: \"9d246b21-168b-419e-bd2d-dbc1a16db427\") " Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.758740 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d246b21-168b-419e-bd2d-dbc1a16db427-logs" (OuterVolumeSpecName: "logs") pod "9d246b21-168b-419e-bd2d-dbc1a16db427" (UID: "9d246b21-168b-419e-bd2d-dbc1a16db427"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.774369 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d246b21-168b-419e-bd2d-dbc1a16db427-kube-api-access-5fvbb" (OuterVolumeSpecName: "kube-api-access-5fvbb") pod "9d246b21-168b-419e-bd2d-dbc1a16db427" (UID: "9d246b21-168b-419e-bd2d-dbc1a16db427"). InnerVolumeSpecName "kube-api-access-5fvbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.804754 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d246b21-168b-419e-bd2d-dbc1a16db427-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.804784 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fvbb\" (UniqueName: \"kubernetes.io/projected/9d246b21-168b-419e-bd2d-dbc1a16db427-kube-api-access-5fvbb\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.807014 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.833199 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-config-data" (OuterVolumeSpecName: "config-data") pod "9d246b21-168b-419e-bd2d-dbc1a16db427" (UID: "9d246b21-168b-419e-bd2d-dbc1a16db427"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.847358 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d246b21-168b-419e-bd2d-dbc1a16db427" (UID: "9d246b21-168b-419e-bd2d-dbc1a16db427"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.906573 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:55 crc kubenswrapper[4611]: I0929 13:02:55.906609 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d246b21-168b-419e-bd2d-dbc1a16db427-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.059232 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.060413 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e804f089-d35a-462b-80f0-562ede9dcffb","Type":"ContainerDied","Data":"3e427212c442c22a93f50304ff4e1beb0d89e9382d28a19ff1c326d94f244022"} Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.060455 4611 scope.go:117] "RemoveContainer" containerID="41358523d62ae7373c4120e7334d7affae2bbe62ea6ca3a833cd4e321eb7ca7e" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.064335 4611 generic.go:334] "Generic (PLEG): container finished" podID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerID="1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d" exitCode=0 Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.064474 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d246b21-168b-419e-bd2d-dbc1a16db427","Type":"ContainerDied","Data":"1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d"} Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.064539 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d246b21-168b-419e-bd2d-dbc1a16db427","Type":"ContainerDied","Data":"deddf0cc32ceead3c0a8072b083c9874581e7b3d9d709c97c49dac26a58866c1"} Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.064725 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.102264 4611 scope.go:117] "RemoveContainer" containerID="15d8f1fa6a7cd836c02cd421c9b085a365231b6b6fd809ba182611efe580b1a3" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.117725 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.156088 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.175005 4611 scope.go:117] "RemoveContainer" containerID="f4e410815c8ea1289b0e64473a3a332acbfe90368b0d2590fd975912246d57c4" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.177491 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: E0929 13:02:56.178083 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-api" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.178187 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-api" Sep 29 13:02:56 crc kubenswrapper[4611]: E0929 13:02:56.178321 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-log" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.178406 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-log" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.178758 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-api" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.178939 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" containerName="nova-api-log" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.180996 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.192459 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.192805 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.193125 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.194748 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.209423 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.243615 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.254546 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.256050 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.261736 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.261935 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.262048 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.273220 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.274607 4611 scope.go:117] "RemoveContainer" containerID="a24c29159bd25e490e80fd3d79e95da7352285f60ab3bf33f9de7a440ae0d22b" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.318795 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.318849 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.318881 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-public-tls-certs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.318918 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.318971 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kqbs\" (UniqueName: \"kubernetes.io/projected/2c8a027f-b0f8-47f4-9747-f727e6fb940c-kube-api-access-6kqbs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.318986 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319018 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-scripts\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: 
I0929 13:02:56.319093 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-config-data\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319108 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319123 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-run-httpd\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319147 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfnvf\" (UniqueName: \"kubernetes.io/projected/1da3e653-00d9-42af-9020-e135e8d8eeae-kube-api-access-pfnvf\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319166 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-log-httpd\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319246 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-config-data\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.319284 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c8a027f-b0f8-47f4-9747-f727e6fb940c-logs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.324350 4611 scope.go:117] "RemoveContainer" containerID="1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.350691 4611 scope.go:117] "RemoveContainer" containerID="b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.367265 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rsn2k"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.393000 4611 scope.go:117] "RemoveContainer" containerID="1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d" Sep 29 13:02:56 crc kubenswrapper[4611]: E0929 13:02:56.393519 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d\": container with ID starting with 
1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d not found: ID does not exist" containerID="1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.393546 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d"} err="failed to get container status \"1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d\": rpc error: code = NotFound desc = could not find container \"1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d\": container with ID starting with 1e942595804d2e2cbbff6446744ea5da20794317c196272795b90b3a3f37285d not found: ID does not exist" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.393570 4611 scope.go:117] "RemoveContainer" containerID="b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6" Sep 29 13:02:56 crc kubenswrapper[4611]: E0929 13:02:56.393879 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6\": container with ID starting with b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6 not found: ID does not exist" containerID="b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.393909 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6"} err="failed to get container status \"b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6\": rpc error: code = NotFound desc = could not find container \"b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6\": container with ID starting with b8d0b0dbf4814d93414511d5c0f5fc18c2f992c247f8ca80ab86481f780f74f6 not found: ID does not exist" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423133 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423455 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-public-tls-certs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423573 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423655 4611 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6kqbs\" (UniqueName: \"kubernetes.io/projected/2c8a027f-b0f8-47f4-9747-f727e6fb940c-kube-api-access-6kqbs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423679 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423720 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-scripts\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423827 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-config-data\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423858 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423885 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-run-httpd\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423925 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfnvf\" (UniqueName: \"kubernetes.io/projected/1da3e653-00d9-42af-9020-e135e8d8eeae-kube-api-access-pfnvf\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.423957 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-log-httpd\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.424013 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-config-data\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.424065 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c8a027f-b0f8-47f4-9747-f727e6fb940c-logs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.424484 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c8a027f-b0f8-47f4-9747-f727e6fb940c-logs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.425599 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-run-httpd\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.425853 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-log-httpd\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.434975 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-public-tls-certs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.435249 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.436417 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.436667 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-config-data\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.436980 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.441384 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.447553 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.449911 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-config-data\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.456007 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-scripts\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.456578 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfnvf\" (UniqueName: \"kubernetes.io/projected/1da3e653-00d9-42af-9020-e135e8d8eeae-kube-api-access-pfnvf\") pod \"ceilometer-0\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.463268 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kqbs\" (UniqueName: \"kubernetes.io/projected/2c8a027f-b0f8-47f4-9747-f727e6fb940c-kube-api-access-6kqbs\") pod \"nova-api-0\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " pod="openstack/nova-api-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.510250 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.535936 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:56 crc kubenswrapper[4611]: I0929 13:02:56.590494 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.054523 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.094106 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerStarted","Data":"f954b45231f9939ca9f36285dd59956736c265c6d0edf2aa3d89ab213690fdbd"} Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.109095 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rsn2k" event={"ID":"a2702802-11e4-4903-9943-fb74e6f7b756","Type":"ContainerStarted","Data":"fdf66591eb64ed2281de39648f1a088fb86c5ff83666f43b19a5ddd508b4c393"} Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.109146 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rsn2k" event={"ID":"a2702802-11e4-4903-9943-fb74e6f7b756","Type":"ContainerStarted","Data":"f85d3b9a080fc1cdd262656c770fca4f74974d0be21e24267b85d81d9f9bdc44"} Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.151062 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.160822 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rsn2k" podStartSLOduration=2.160800029 podStartE2EDuration="2.160800029s" podCreationTimestamp="2025-09-29 13:02:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:57.135543189 +0000 UTC m=+1364.027062805" watchObservedRunningTime="2025-09-29 13:02:57.160800029 +0000 UTC m=+1364.052319635" Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 
13:02:57.750801 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d246b21-168b-419e-bd2d-dbc1a16db427" path="/var/lib/kubelet/pods/9d246b21-168b-419e-bd2d-dbc1a16db427/volumes" Sep 29 13:02:57 crc kubenswrapper[4611]: I0929 13:02:57.752086 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e804f089-d35a-462b-80f0-562ede9dcffb" path="/var/lib/kubelet/pods/e804f089-d35a-462b-80f0-562ede9dcffb/volumes" Sep 29 13:02:58 crc kubenswrapper[4611]: I0929 13:02:58.134306 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c8a027f-b0f8-47f4-9747-f727e6fb940c","Type":"ContainerStarted","Data":"db8b8e15e41078bc2b3cf7cac6e743aee9d042a986a9dc98cf31b897d733c4aa"} Sep 29 13:02:58 crc kubenswrapper[4611]: I0929 13:02:58.134360 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c8a027f-b0f8-47f4-9747-f727e6fb940c","Type":"ContainerStarted","Data":"ed1cd677a4029765fb485d4b1de52cf26a01401ea9e7785ba360e515ce93b3db"} Sep 29 13:02:58 crc kubenswrapper[4611]: I0929 13:02:58.134373 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c8a027f-b0f8-47f4-9747-f727e6fb940c","Type":"ContainerStarted","Data":"dad381fff56b4feb540c2d30e47b64d85f0b44a88e1cb7abb06941ea77c741ac"} Sep 29 13:02:58 crc kubenswrapper[4611]: I0929 13:02:58.136448 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerStarted","Data":"cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1"} Sep 29 13:02:58 crc kubenswrapper[4611]: I0929 13:02:58.162796 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.162774325 podStartE2EDuration="2.162774325s" podCreationTimestamp="2025-09-29 13:02:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:02:58.162059624 +0000 UTC m=+1365.053579240" watchObservedRunningTime="2025-09-29 13:02:58.162774325 +0000 UTC m=+1365.054293931" Sep 29 13:02:59 crc kubenswrapper[4611]: I0929 13:02:59.193151 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerStarted","Data":"ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb"} Sep 29 13:02:59 crc kubenswrapper[4611]: I0929 13:02:59.513959 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:02:59 crc kubenswrapper[4611]: I0929 13:02:59.608397 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b9876949c-pzwtc"] Sep 29 13:02:59 crc kubenswrapper[4611]: I0929 13:02:59.608647 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="dnsmasq-dns" containerID="cri-o://123dbe6cc20a8552bd1d2dd780fada2fee7a5f8d7b11085a1a604c577869e040" gracePeriod=10 Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.216224 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerStarted","Data":"44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12"} Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 
13:03:00.231679 4611 generic.go:334] "Generic (PLEG): container finished" podID="30738414-5a70-41ef-a024-9494359b8b2e" containerID="123dbe6cc20a8552bd1d2dd780fada2fee7a5f8d7b11085a1a604c577869e040" exitCode=0 Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.231719 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" event={"ID":"30738414-5a70-41ef-a024-9494359b8b2e","Type":"ContainerDied","Data":"123dbe6cc20a8552bd1d2dd780fada2fee7a5f8d7b11085a1a604c577869e040"} Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.370063 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.438277 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-sb\") pod \"30738414-5a70-41ef-a024-9494359b8b2e\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.438364 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-nb\") pod \"30738414-5a70-41ef-a024-9494359b8b2e\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.438401 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shdwx\" (UniqueName: \"kubernetes.io/projected/30738414-5a70-41ef-a024-9494359b8b2e-kube-api-access-shdwx\") pod \"30738414-5a70-41ef-a024-9494359b8b2e\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.438430 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-config\") pod \"30738414-5a70-41ef-a024-9494359b8b2e\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.438455 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-svc\") pod \"30738414-5a70-41ef-a024-9494359b8b2e\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.438580 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-swift-storage-0\") pod \"30738414-5a70-41ef-a024-9494359b8b2e\" (UID: \"30738414-5a70-41ef-a024-9494359b8b2e\") " Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.465869 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30738414-5a70-41ef-a024-9494359b8b2e-kube-api-access-shdwx" (OuterVolumeSpecName: "kube-api-access-shdwx") pod "30738414-5a70-41ef-a024-9494359b8b2e" (UID: "30738414-5a70-41ef-a024-9494359b8b2e"). InnerVolumeSpecName "kube-api-access-shdwx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.543491 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shdwx\" (UniqueName: \"kubernetes.io/projected/30738414-5a70-41ef-a024-9494359b8b2e-kube-api-access-shdwx\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.587904 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "30738414-5a70-41ef-a024-9494359b8b2e" (UID: "30738414-5a70-41ef-a024-9494359b8b2e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.602495 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30738414-5a70-41ef-a024-9494359b8b2e" (UID: "30738414-5a70-41ef-a024-9494359b8b2e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.616715 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "30738414-5a70-41ef-a024-9494359b8b2e" (UID: "30738414-5a70-41ef-a024-9494359b8b2e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.617056 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-config" (OuterVolumeSpecName: "config") pod "30738414-5a70-41ef-a024-9494359b8b2e" (UID: "30738414-5a70-41ef-a024-9494359b8b2e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.646029 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.646061 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.646094 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.646105 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.656219 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "30738414-5a70-41ef-a024-9494359b8b2e" (UID: "30738414-5a70-41ef-a024-9494359b8b2e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:00 crc kubenswrapper[4611]: I0929 13:03:00.748007 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30738414-5a70-41ef-a024-9494359b8b2e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.243884 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" event={"ID":"30738414-5a70-41ef-a024-9494359b8b2e","Type":"ContainerDied","Data":"b412ba6b0d1a3bdcb7b0964d063799b9ca2ea8b01bd222c028bc517e161bebea"} Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.243895 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.244178 4611 scope.go:117] "RemoveContainer" containerID="123dbe6cc20a8552bd1d2dd780fada2fee7a5f8d7b11085a1a604c577869e040" Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.247217 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerStarted","Data":"69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0"} Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.247404 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.247400 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="proxy-httpd" containerID="cri-o://69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0" gracePeriod=30 Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.247384 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-central-agent" containerID="cri-o://cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1" gracePeriod=30 Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.247516 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-notification-agent" containerID="cri-o://ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb" gracePeriod=30 Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.247683 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="sg-core" containerID="cri-o://44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12" gracePeriod=30 Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.283237 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7264536719999999 podStartE2EDuration="5.283216732s" podCreationTimestamp="2025-09-29 13:02:56 +0000 UTC" firstStartedPulling="2025-09-29 13:02:57.064060341 +0000 UTC m=+1363.955579947" lastFinishedPulling="2025-09-29 13:03:00.620823401 +0000 UTC m=+1367.512343007" observedRunningTime="2025-09-29 13:03:01.278892377 +0000 UTC m=+1368.170411983" watchObservedRunningTime="2025-09-29 13:03:01.283216732 +0000 UTC m=+1368.174736338" Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.305806 4611 
scope.go:117] "RemoveContainer" containerID="1be7cd241a53244bc8b74954c63fdaaff22816917b9f04c159940621e8878be1" Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.318706 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b9876949c-pzwtc"] Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.328258 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b9876949c-pzwtc"] Sep 29 13:03:01 crc kubenswrapper[4611]: I0929 13:03:01.759163 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30738414-5a70-41ef-a024-9494359b8b2e" path="/var/lib/kubelet/pods/30738414-5a70-41ef-a024-9494359b8b2e/volumes" Sep 29 13:03:02 crc kubenswrapper[4611]: I0929 13:03:02.261049 4611 generic.go:334] "Generic (PLEG): container finished" podID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerID="69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0" exitCode=0 Sep 29 13:03:02 crc kubenswrapper[4611]: I0929 13:03:02.261365 4611 generic.go:334] "Generic (PLEG): container finished" podID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerID="44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12" exitCode=2 Sep 29 13:03:02 crc kubenswrapper[4611]: I0929 13:03:02.261373 4611 generic.go:334] "Generic (PLEG): container finished" podID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerID="ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb" exitCode=0 Sep 29 13:03:02 crc kubenswrapper[4611]: I0929 13:03:02.261113 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerDied","Data":"69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0"} Sep 29 13:03:02 crc kubenswrapper[4611]: I0929 13:03:02.261407 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerDied","Data":"44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12"} Sep 29 13:03:02 crc kubenswrapper[4611]: I0929 13:03:02.261424 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerDied","Data":"ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb"} Sep 29 13:03:04 crc kubenswrapper[4611]: I0929 13:03:04.628131 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:03:04 crc kubenswrapper[4611]: I0929 13:03:04.628418 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:03:05 crc kubenswrapper[4611]: I0929 13:03:05.270289 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b9876949c-pzwtc" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.196:5353: i/o timeout" Sep 29 13:03:05 crc kubenswrapper[4611]: I0929 13:03:05.291957 4611 generic.go:334] "Generic (PLEG): container finished" 
podID="a2702802-11e4-4903-9943-fb74e6f7b756" containerID="fdf66591eb64ed2281de39648f1a088fb86c5ff83666f43b19a5ddd508b4c393" exitCode=0 Sep 29 13:03:05 crc kubenswrapper[4611]: I0929 13:03:05.292011 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rsn2k" event={"ID":"a2702802-11e4-4903-9943-fb74e6f7b756","Type":"ContainerDied","Data":"fdf66591eb64ed2281de39648f1a088fb86c5ff83666f43b19a5ddd508b4c393"} Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.590870 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.592294 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.695497 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.758058 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-config-data\") pod \"a2702802-11e4-4903-9943-fb74e6f7b756\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.758152 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw57s\" (UniqueName: \"kubernetes.io/projected/a2702802-11e4-4903-9943-fb74e6f7b756-kube-api-access-sw57s\") pod \"a2702802-11e4-4903-9943-fb74e6f7b756\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.758300 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-scripts\") pod \"a2702802-11e4-4903-9943-fb74e6f7b756\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.758446 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-combined-ca-bundle\") pod \"a2702802-11e4-4903-9943-fb74e6f7b756\" (UID: \"a2702802-11e4-4903-9943-fb74e6f7b756\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.767306 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2702802-11e4-4903-9943-fb74e6f7b756-kube-api-access-sw57s" (OuterVolumeSpecName: "kube-api-access-sw57s") pod "a2702802-11e4-4903-9943-fb74e6f7b756" (UID: "a2702802-11e4-4903-9943-fb74e6f7b756"). InnerVolumeSpecName "kube-api-access-sw57s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.768214 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-scripts" (OuterVolumeSpecName: "scripts") pod "a2702802-11e4-4903-9943-fb74e6f7b756" (UID: "a2702802-11e4-4903-9943-fb74e6f7b756"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.793835 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-config-data" (OuterVolumeSpecName: "config-data") pod "a2702802-11e4-4903-9943-fb74e6f7b756" (UID: "a2702802-11e4-4903-9943-fb74e6f7b756"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.815179 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2702802-11e4-4903-9943-fb74e6f7b756" (UID: "a2702802-11e4-4903-9943-fb74e6f7b756"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.861036 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.861082 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.861097 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2702802-11e4-4903-9943-fb74e6f7b756-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.861109 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw57s\" (UniqueName: \"kubernetes.io/projected/a2702802-11e4-4903-9943-fb74e6f7b756-kube-api-access-sw57s\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.893586 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963441 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-combined-ca-bundle\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963523 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-run-httpd\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963585 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-log-httpd\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963649 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-config-data\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963823 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-ceilometer-tls-certs\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963863 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfnvf\" (UniqueName: \"kubernetes.io/projected/1da3e653-00d9-42af-9020-e135e8d8eeae-kube-api-access-pfnvf\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963906 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-sg-core-conf-yaml\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.963961 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-scripts\") pod \"1da3e653-00d9-42af-9020-e135e8d8eeae\" (UID: \"1da3e653-00d9-42af-9020-e135e8d8eeae\") " Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.964310 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.964651 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.964720 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.970516 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1da3e653-00d9-42af-9020-e135e8d8eeae-kube-api-access-pfnvf" (OuterVolumeSpecName: "kube-api-access-pfnvf") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "kube-api-access-pfnvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.972978 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-scripts" (OuterVolumeSpecName: "scripts") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:06 crc kubenswrapper[4611]: I0929 13:03:06.996669 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.032892 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.054850 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.066215 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1da3e653-00d9-42af-9020-e135e8d8eeae-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.066260 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.066364 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfnvf\" (UniqueName: \"kubernetes.io/projected/1da3e653-00d9-42af-9020-e135e8d8eeae-kube-api-access-pfnvf\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.066375 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.066385 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.066395 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.096028 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-config-data" (OuterVolumeSpecName: "config-data") pod "1da3e653-00d9-42af-9020-e135e8d8eeae" (UID: "1da3e653-00d9-42af-9020-e135e8d8eeae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.167949 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1da3e653-00d9-42af-9020-e135e8d8eeae-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.312421 4611 generic.go:334] "Generic (PLEG): container finished" podID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerID="cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1" exitCode=0 Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.312473 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.312503 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerDied","Data":"cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1"} Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.312534 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1da3e653-00d9-42af-9020-e135e8d8eeae","Type":"ContainerDied","Data":"f954b45231f9939ca9f36285dd59956736c265c6d0edf2aa3d89ab213690fdbd"} Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.312556 4611 scope.go:117] "RemoveContainer" containerID="69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.315922 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rsn2k" event={"ID":"a2702802-11e4-4903-9943-fb74e6f7b756","Type":"ContainerDied","Data":"f85d3b9a080fc1cdd262656c770fca4f74974d0be21e24267b85d81d9f9bdc44"} Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.316109 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f85d3b9a080fc1cdd262656c770fca4f74974d0be21e24267b85d81d9f9bdc44" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.315973 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rsn2k" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.369966 4611 scope.go:117] "RemoveContainer" containerID="44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.373133 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.389795 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.453775 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454215 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-central-agent" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454230 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-central-agent" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454248 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="proxy-httpd" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454255 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="proxy-httpd" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454269 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-notification-agent" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454276 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-notification-agent" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454296 4611 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="sg-core" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454303 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="sg-core" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454320 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2702802-11e4-4903-9943-fb74e6f7b756" containerName="nova-manage" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454326 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2702802-11e4-4903-9943-fb74e6f7b756" containerName="nova-manage" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454340 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="dnsmasq-dns" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454348 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="dnsmasq-dns" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.454362 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="init" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454368 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="init" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454567 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="30738414-5a70-41ef-a024-9494359b8b2e" containerName="dnsmasq-dns" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454587 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-notification-agent" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454603 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="proxy-httpd" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.454614 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="ceilometer-central-agent" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.455225 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2702802-11e4-4903-9943-fb74e6f7b756" containerName="nova-manage" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.455278 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" containerName="sg-core" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.465456 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.471181 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.474956 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.477308 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.477553 4611 scope.go:117] "RemoveContainer" containerID="ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.483354 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.552106 4611 scope.go:117] "RemoveContainer" containerID="cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.557443 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574532 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-config-data\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574601 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574678 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-scripts\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574763 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574803 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-log-httpd\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574856 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8bl6\" (UniqueName: \"kubernetes.io/projected/72b8cd6d-9dfa-483d-9634-09df584bf7ed-kube-api-access-f8bl6\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574885 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-run-httpd\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.574919 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.593219 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.593471 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="252195d0-8a55-4479-b2f6-3248f52fea78" containerName="nova-scheduler-scheduler" containerID="cri-o://51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" gracePeriod=30 Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.616761 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.617423 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-log" containerID="cri-o://c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82" gracePeriod=30 Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.617592 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-metadata" containerID="cri-o://d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb" gracePeriod=30 Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.629380 4611 scope.go:117] "RemoveContainer" containerID="69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.630705 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0\": container with ID starting with 69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0 not found: ID does not exist" containerID="69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.630749 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0"} err="failed to get container status \"69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0\": rpc error: code = NotFound desc = could not find container \"69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0\": container with ID starting with 69c38aed7d3446dfca1d44babd4eabea2096cb7fc6d4668d77a52102b70910d0 not found: ID does not exist" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.630773 4611 scope.go:117] "RemoveContainer" containerID="44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.633278 4611 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12\": container with ID starting with 44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12 not found: ID does not exist" containerID="44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.633366 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12"} err="failed to get container status \"44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12\": rpc error: code = NotFound desc = could not find container \"44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12\": container with ID starting with 44ba2361a994d6b08f1908ad613899ee4c0cde9271d99a8a8ad1bc3cc7d64b12 not found: ID does not exist" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.633397 4611 scope.go:117] "RemoveContainer" containerID="ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.635418 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb\": container with ID starting with ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb not found: ID does not exist" containerID="ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.635478 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb"} err="failed to get container status \"ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb\": rpc error: code = NotFound desc = could not find container \"ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb\": container with ID starting with ec4c6ac40c96361613a1edd6ef74e835f76fcb3e88646e7055587e22b7aeafeb not found: ID does not exist" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.635504 4611 scope.go:117] "RemoveContainer" containerID="cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1" Sep 29 13:03:07 crc kubenswrapper[4611]: E0929 13:03:07.637153 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1\": container with ID starting with cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1 not found: ID does not exist" containerID="cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.637225 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1"} err="failed to get container status \"cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1\": rpc error: code = NotFound desc = could not find container \"cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1\": container with ID starting with cecfe36151a286c9bb90fb0c192dc4a2aa8a59d903874e56ab8d5c6a1dec4de1 not found: ID does not exist" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.678143 4611 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.678548 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-log-httpd\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.678742 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8bl6\" (UniqueName: \"kubernetes.io/projected/72b8cd6d-9dfa-483d-9634-09df584bf7ed-kube-api-access-f8bl6\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.678775 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-run-httpd\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.678835 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.678956 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-config-data\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.679007 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.679092 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-scripts\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.680802 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.681136 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.685501 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-run-httpd\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.688652 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-config-data\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.688954 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-log-httpd\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.690239 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.696615 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-scripts\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.704228 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.712578 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.744473 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8bl6\" (UniqueName: \"kubernetes.io/projected/72b8cd6d-9dfa-483d-9634-09df584bf7ed-kube-api-access-f8bl6\") pod \"ceilometer-0\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " pod="openstack/ceilometer-0" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.804935 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1da3e653-00d9-42af-9020-e135e8d8eeae" path="/var/lib/kubelet/pods/1da3e653-00d9-42af-9020-e135e8d8eeae/volumes" Sep 29 13:03:07 crc kubenswrapper[4611]: I0929 13:03:07.818647 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:03:08 crc kubenswrapper[4611]: E0929 13:03:08.287817 4611 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 13:03:08 crc kubenswrapper[4611]: E0929 13:03:08.291853 4611 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 13:03:08 crc kubenswrapper[4611]: E0929 13:03:08.295065 4611 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 13:03:08 crc kubenswrapper[4611]: E0929 13:03:08.295122 4611 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="252195d0-8a55-4479-b2f6-3248f52fea78" containerName="nova-scheduler-scheduler" Sep 29 13:03:08 crc kubenswrapper[4611]: I0929 13:03:08.327774 4611 generic.go:334] "Generic (PLEG): container finished" podID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerID="c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82" exitCode=143 Sep 29 13:03:08 crc kubenswrapper[4611]: I0929 13:03:08.327842 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5df993fe-7440-4ec0-97c7-498f4a999ad2","Type":"ContainerDied","Data":"c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82"} Sep 29 13:03:08 crc kubenswrapper[4611]: I0929 13:03:08.333745 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-log" containerID="cri-o://ed1cd677a4029765fb485d4b1de52cf26a01401ea9e7785ba360e515ce93b3db" gracePeriod=30 Sep 29 13:03:08 crc kubenswrapper[4611]: I0929 13:03:08.333805 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-api" containerID="cri-o://db8b8e15e41078bc2b3cf7cac6e743aee9d042a986a9dc98cf31b897d733c4aa" gracePeriod=30 Sep 29 13:03:08 crc kubenswrapper[4611]: I0929 13:03:08.513806 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:03:09 crc kubenswrapper[4611]: I0929 13:03:09.344124 4611 generic.go:334] "Generic (PLEG): container finished" podID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerID="ed1cd677a4029765fb485d4b1de52cf26a01401ea9e7785ba360e515ce93b3db" exitCode=143 Sep 29 13:03:09 crc kubenswrapper[4611]: I0929 13:03:09.344760 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c8a027f-b0f8-47f4-9747-f727e6fb940c","Type":"ContainerDied","Data":"ed1cd677a4029765fb485d4b1de52cf26a01401ea9e7785ba360e515ce93b3db"} Sep 
Sep 29 13:03:09 crc kubenswrapper[4611]: I0929 13:03:09.346661 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerStarted","Data":"bd6551d1fa5c534d534ad3bc12cd9c5c937482943902f3332d4f0b9f8862bdcd"}
Sep 29 13:03:09 crc kubenswrapper[4611]: I0929 13:03:09.346693 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerStarted","Data":"4f3c5d454477ef0995659f62f2993f7c8c0489a6d34634d2af78357c11097ad4"}
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.205517 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.239184 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxdth\" (UniqueName: \"kubernetes.io/projected/5df993fe-7440-4ec0-97c7-498f4a999ad2-kube-api-access-jxdth\") pod \"5df993fe-7440-4ec0-97c7-498f4a999ad2\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") "
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.239263 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-config-data\") pod \"5df993fe-7440-4ec0-97c7-498f4a999ad2\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") "
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.239313 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-combined-ca-bundle\") pod \"5df993fe-7440-4ec0-97c7-498f4a999ad2\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") "
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.239462 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df993fe-7440-4ec0-97c7-498f4a999ad2-logs\") pod \"5df993fe-7440-4ec0-97c7-498f4a999ad2\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") "
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.239493 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-nova-metadata-tls-certs\") pod \"5df993fe-7440-4ec0-97c7-498f4a999ad2\" (UID: \"5df993fe-7440-4ec0-97c7-498f4a999ad2\") "
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.240037 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5df993fe-7440-4ec0-97c7-498f4a999ad2-logs" (OuterVolumeSpecName: "logs") pod "5df993fe-7440-4ec0-97c7-498f4a999ad2" (UID: "5df993fe-7440-4ec0-97c7-498f4a999ad2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.275948 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5df993fe-7440-4ec0-97c7-498f4a999ad2-kube-api-access-jxdth" (OuterVolumeSpecName: "kube-api-access-jxdth") pod "5df993fe-7440-4ec0-97c7-498f4a999ad2" (UID: "5df993fe-7440-4ec0-97c7-498f4a999ad2"). InnerVolumeSpecName "kube-api-access-jxdth".
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.312830 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-config-data" (OuterVolumeSpecName: "config-data") pod "5df993fe-7440-4ec0-97c7-498f4a999ad2" (UID: "5df993fe-7440-4ec0-97c7-498f4a999ad2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.342908 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df993fe-7440-4ec0-97c7-498f4a999ad2-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.343929 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxdth\" (UniqueName: \"kubernetes.io/projected/5df993fe-7440-4ec0-97c7-498f4a999ad2-kube-api-access-jxdth\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.343941 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.355985 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5df993fe-7440-4ec0-97c7-498f4a999ad2" (UID: "5df993fe-7440-4ec0-97c7-498f4a999ad2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.363703 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5df993fe-7440-4ec0-97c7-498f4a999ad2" (UID: "5df993fe-7440-4ec0-97c7-498f4a999ad2"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.377711 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerStarted","Data":"6a4291c400687a8859bca93b8773a6865f9de8532f23fe5ceddcf8dad9b2846f"} Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.384021 4611 generic.go:334] "Generic (PLEG): container finished" podID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerID="d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb" exitCode=0 Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.384078 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5df993fe-7440-4ec0-97c7-498f4a999ad2","Type":"ContainerDied","Data":"d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb"} Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.384105 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5df993fe-7440-4ec0-97c7-498f4a999ad2","Type":"ContainerDied","Data":"8b0671cd302a792ed0e4eeb908d8064b40757cbab45efbab86f574e2a4790e18"} Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.384142 4611 scope.go:117] "RemoveContainer" containerID="d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.384243 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.436814 4611 scope.go:117] "RemoveContainer" containerID="c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.447481 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.447513 4611 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5df993fe-7440-4ec0-97c7-498f4a999ad2-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.453841 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.494534 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.504950 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:03:10 crc kubenswrapper[4611]: E0929 13:03:10.505422 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-metadata" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.505435 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-metadata" Sep 29 13:03:10 crc kubenswrapper[4611]: E0929 13:03:10.505474 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-log" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.505480 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" 
containerName="nova-metadata-log" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.505681 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-metadata" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.505704 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" containerName="nova-metadata-log" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.506727 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.517550 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.517931 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.548575 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.548653 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-config-data\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.548595 4611 scope.go:117] "RemoveContainer" containerID="d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.548748 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjlmq\" (UniqueName: \"kubernetes.io/projected/38cd3c38-4553-4c07-8627-615a255435d2-kube-api-access-sjlmq\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.548853 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.549051 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38cd3c38-4553-4c07-8627-615a255435d2-logs\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: E0929 13:03:10.551869 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb\": container with ID starting with d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb not found: ID does not exist" containerID="d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb" Sep 29 13:03:10 crc 
kubenswrapper[4611]: I0929 13:03:10.551903 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb"} err="failed to get container status \"d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb\": rpc error: code = NotFound desc = could not find container \"d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb\": container with ID starting with d24338247d5bffa2ef3ab13f2ad36946142ada210088e8d1acc5ef7d739865cb not found: ID does not exist" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.551925 4611 scope.go:117] "RemoveContainer" containerID="c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82" Sep 29 13:03:10 crc kubenswrapper[4611]: E0929 13:03:10.555596 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82\": container with ID starting with c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82 not found: ID does not exist" containerID="c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.555647 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82"} err="failed to get container status \"c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82\": rpc error: code = NotFound desc = could not find container \"c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82\": container with ID starting with c46afac69f6db6c82889f371656b7cec72b50f31be3c041618ea167afdffbd82 not found: ID does not exist" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.582428 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.650655 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjlmq\" (UniqueName: \"kubernetes.io/projected/38cd3c38-4553-4c07-8627-615a255435d2-kube-api-access-sjlmq\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.650747 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.650826 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38cd3c38-4553-4c07-8627-615a255435d2-logs\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.650873 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.650896 4611 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-config-data\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.651357 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38cd3c38-4553-4c07-8627-615a255435d2-logs\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.656869 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-config-data\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.658348 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.658903 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38cd3c38-4553-4c07-8627-615a255435d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.675607 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjlmq\" (UniqueName: \"kubernetes.io/projected/38cd3c38-4553-4c07-8627-615a255435d2-kube-api-access-sjlmq\") pod \"nova-metadata-0\" (UID: \"38cd3c38-4553-4c07-8627-615a255435d2\") " pod="openstack/nova-metadata-0" Sep 29 13:03:10 crc kubenswrapper[4611]: I0929 13:03:10.836044 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 13:03:11 crc kubenswrapper[4611]: I0929 13:03:11.396101 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerStarted","Data":"0eb6fc18e2acf53303a64c9252161bed2f90f2b3ce2ddf74372d39a82bece954"} Sep 29 13:03:11 crc kubenswrapper[4611]: I0929 13:03:11.452214 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 13:03:11 crc kubenswrapper[4611]: I0929 13:03:11.758399 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5df993fe-7440-4ec0-97c7-498f4a999ad2" path="/var/lib/kubelet/pods/5df993fe-7440-4ec0-97c7-498f4a999ad2/volumes" Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.407396 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38cd3c38-4553-4c07-8627-615a255435d2","Type":"ContainerStarted","Data":"55568cee3ef790f1bb72fc4dc4bdc18c01b0089c0428b9ba18e5cab5e928856c"} Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.407759 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38cd3c38-4553-4c07-8627-615a255435d2","Type":"ContainerStarted","Data":"dbce2b722dae3ad6ccefade3c9defd0415706392f257f05110b12c6ad132b5dc"} Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.407775 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38cd3c38-4553-4c07-8627-615a255435d2","Type":"ContainerStarted","Data":"5d9bb6fca1f2794389dacc3b6fc04a49a8a4c8c77b59fad9ef81f48a49ca05f8"} Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.411455 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerStarted","Data":"8f13094a4d1d946b9bd6f0beff37f0ef63278ddfaa19db88671e921a51ae36dd"} Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.412419 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.442979 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.44295602 podStartE2EDuration="2.44295602s" podCreationTimestamp="2025-09-29 13:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:03:12.432653992 +0000 UTC m=+1379.324173628" watchObservedRunningTime="2025-09-29 13:03:12.44295602 +0000 UTC m=+1379.334475626" Sep 29 13:03:12 crc kubenswrapper[4611]: I0929 13:03:12.468464 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8884807179999998 podStartE2EDuration="5.468441708s" podCreationTimestamp="2025-09-29 13:03:07 +0000 UTC" firstStartedPulling="2025-09-29 13:03:08.522877051 +0000 UTC m=+1375.414396667" lastFinishedPulling="2025-09-29 13:03:12.102838051 +0000 UTC m=+1378.994357657" observedRunningTime="2025-09-29 13:03:12.466003147 +0000 UTC m=+1379.357522753" watchObservedRunningTime="2025-09-29 13:03:12.468441708 +0000 UTC m=+1379.359961314" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.267290 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.407182 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-config-data\") pod \"252195d0-8a55-4479-b2f6-3248f52fea78\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.407320 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-combined-ca-bundle\") pod \"252195d0-8a55-4479-b2f6-3248f52fea78\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.407420 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gzzx\" (UniqueName: \"kubernetes.io/projected/252195d0-8a55-4479-b2f6-3248f52fea78-kube-api-access-6gzzx\") pod \"252195d0-8a55-4479-b2f6-3248f52fea78\" (UID: \"252195d0-8a55-4479-b2f6-3248f52fea78\") " Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.413499 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252195d0-8a55-4479-b2f6-3248f52fea78-kube-api-access-6gzzx" (OuterVolumeSpecName: "kube-api-access-6gzzx") pod "252195d0-8a55-4479-b2f6-3248f52fea78" (UID: "252195d0-8a55-4479-b2f6-3248f52fea78"). InnerVolumeSpecName "kube-api-access-6gzzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.426071 4611 generic.go:334] "Generic (PLEG): container finished" podID="252195d0-8a55-4479-b2f6-3248f52fea78" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" exitCode=0 Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.427057 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.427885 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"252195d0-8a55-4479-b2f6-3248f52fea78","Type":"ContainerDied","Data":"51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596"} Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.427955 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"252195d0-8a55-4479-b2f6-3248f52fea78","Type":"ContainerDied","Data":"d16dd60a374946accc40ac83ddd5f4097ae8b9c77a725d6e1636a9140b6d0e72"} Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.427973 4611 scope.go:117] "RemoveContainer" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.465797 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-config-data" (OuterVolumeSpecName: "config-data") pod "252195d0-8a55-4479-b2f6-3248f52fea78" (UID: "252195d0-8a55-4479-b2f6-3248f52fea78"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.468830 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "252195d0-8a55-4479-b2f6-3248f52fea78" (UID: "252195d0-8a55-4479-b2f6-3248f52fea78"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.509495 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.509538 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252195d0-8a55-4479-b2f6-3248f52fea78-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.509552 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gzzx\" (UniqueName: \"kubernetes.io/projected/252195d0-8a55-4479-b2f6-3248f52fea78-kube-api-access-6gzzx\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.582573 4611 scope.go:117] "RemoveContainer" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" Sep 29 13:03:13 crc kubenswrapper[4611]: E0929 13:03:13.583190 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596\": container with ID starting with 51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596 not found: ID does not exist" containerID="51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.583274 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596"} err="failed to get container status \"51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596\": rpc error: code = NotFound desc = could not find container \"51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596\": container with ID starting with 51a756b593bd6480ab9b74d4d5547e7c1496118ca9eea9a8a81cd7a866cc8596 not found: ID does not exist" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.779006 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.805658 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.832819 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:03:13 crc kubenswrapper[4611]: E0929 13:03:13.833266 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252195d0-8a55-4479-b2f6-3248f52fea78" containerName="nova-scheduler-scheduler" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.833285 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="252195d0-8a55-4479-b2f6-3248f52fea78" containerName="nova-scheduler-scheduler" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.833466 4611 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="252195d0-8a55-4479-b2f6-3248f52fea78" containerName="nova-scheduler-scheduler" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.834071 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.836872 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:03:13 crc kubenswrapper[4611]: I0929 13:03:13.837308 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.017820 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce84f09-60a3-4214-b3cb-85aca6574a83-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.017925 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce84f09-60a3-4214-b3cb-85aca6574a83-config-data\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.017974 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fq8f\" (UniqueName: \"kubernetes.io/projected/9ce84f09-60a3-4214-b3cb-85aca6574a83-kube-api-access-4fq8f\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.119842 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fq8f\" (UniqueName: \"kubernetes.io/projected/9ce84f09-60a3-4214-b3cb-85aca6574a83-kube-api-access-4fq8f\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.119966 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce84f09-60a3-4214-b3cb-85aca6574a83-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.120049 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce84f09-60a3-4214-b3cb-85aca6574a83-config-data\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.123958 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce84f09-60a3-4214-b3cb-85aca6574a83-config-data\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.131357 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce84f09-60a3-4214-b3cb-85aca6574a83-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " 
pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.141380 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fq8f\" (UniqueName: \"kubernetes.io/projected/9ce84f09-60a3-4214-b3cb-85aca6574a83-kube-api-access-4fq8f\") pod \"nova-scheduler-0\" (UID: \"9ce84f09-60a3-4214-b3cb-85aca6574a83\") " pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.166707 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.460853 4611 generic.go:334] "Generic (PLEG): container finished" podID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerID="db8b8e15e41078bc2b3cf7cac6e743aee9d042a986a9dc98cf31b897d733c4aa" exitCode=0 Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.462012 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c8a027f-b0f8-47f4-9747-f727e6fb940c","Type":"ContainerDied","Data":"db8b8e15e41078bc2b3cf7cac6e743aee9d042a986a9dc98cf31b897d733c4aa"} Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.672390 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 13:03:14 crc kubenswrapper[4611]: I0929 13:03:14.882971 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.044225 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-internal-tls-certs\") pod \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.044413 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kqbs\" (UniqueName: \"kubernetes.io/projected/2c8a027f-b0f8-47f4-9747-f727e6fb940c-kube-api-access-6kqbs\") pod \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.044436 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c8a027f-b0f8-47f4-9747-f727e6fb940c-logs\") pod \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.044491 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-combined-ca-bundle\") pod \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.044543 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-config-data\") pod \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\" (UID: \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.044579 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-public-tls-certs\") pod \"2c8a027f-b0f8-47f4-9747-f727e6fb940c\" (UID: 
\"2c8a027f-b0f8-47f4-9747-f727e6fb940c\") " Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.047115 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c8a027f-b0f8-47f4-9747-f727e6fb940c-logs" (OuterVolumeSpecName: "logs") pod "2c8a027f-b0f8-47f4-9747-f727e6fb940c" (UID: "2c8a027f-b0f8-47f4-9747-f727e6fb940c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.055909 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c8a027f-b0f8-47f4-9747-f727e6fb940c-kube-api-access-6kqbs" (OuterVolumeSpecName: "kube-api-access-6kqbs") pod "2c8a027f-b0f8-47f4-9747-f727e6fb940c" (UID: "2c8a027f-b0f8-47f4-9747-f727e6fb940c"). InnerVolumeSpecName "kube-api-access-6kqbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.090307 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c8a027f-b0f8-47f4-9747-f727e6fb940c" (UID: "2c8a027f-b0f8-47f4-9747-f727e6fb940c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.111368 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-config-data" (OuterVolumeSpecName: "config-data") pod "2c8a027f-b0f8-47f4-9747-f727e6fb940c" (UID: "2c8a027f-b0f8-47f4-9747-f727e6fb940c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.132859 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2c8a027f-b0f8-47f4-9747-f727e6fb940c" (UID: "2c8a027f-b0f8-47f4-9747-f727e6fb940c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.136518 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2c8a027f-b0f8-47f4-9747-f727e6fb940c" (UID: "2c8a027f-b0f8-47f4-9747-f727e6fb940c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.146998 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kqbs\" (UniqueName: \"kubernetes.io/projected/2c8a027f-b0f8-47f4-9747-f727e6fb940c-kube-api-access-6kqbs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.147211 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c8a027f-b0f8-47f4-9747-f727e6fb940c-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.147402 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.147503 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.147581 4611 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.147701 4611 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c8a027f-b0f8-47f4-9747-f727e6fb940c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.479080 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c8a027f-b0f8-47f4-9747-f727e6fb940c","Type":"ContainerDied","Data":"dad381fff56b4feb540c2d30e47b64d85f0b44a88e1cb7abb06941ea77c741ac"} Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.479128 4611 scope.go:117] "RemoveContainer" containerID="db8b8e15e41078bc2b3cf7cac6e743aee9d042a986a9dc98cf31b897d733c4aa" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.479138 4611 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.484819 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9ce84f09-60a3-4214-b3cb-85aca6574a83","Type":"ContainerStarted","Data":"9bd0443e870ed305f25ede2a412e0eedf7b91a4123b53b08667dc2850b26d536"}
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.484890 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9ce84f09-60a3-4214-b3cb-85aca6574a83","Type":"ContainerStarted","Data":"66cb388a3b36f38327dec5d9756ec9d062ab5b3c1fb8008f87ca02beff8b7744"}
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.509236 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.50920942 podStartE2EDuration="2.50920942s" podCreationTimestamp="2025-09-29 13:03:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:03:15.503934828 +0000 UTC m=+1382.395454434" watchObservedRunningTime="2025-09-29 13:03:15.50920942 +0000 UTC m=+1382.400729026"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.567576 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.569197 4611 scope.go:117] "RemoveContainer" containerID="ed1cd677a4029765fb485d4b1de52cf26a01401ea9e7785ba360e515ce93b3db"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.577204 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.598923 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 13:03:15 crc kubenswrapper[4611]: E0929 13:03:15.599360 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-api"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.599373 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-api"
Sep 29 13:03:15 crc kubenswrapper[4611]: E0929 13:03:15.599389 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-log"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.599396 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-log"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.599658 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-api"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.599676 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" containerName="nova-api-log"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.600820 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.603806 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.603975 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.604130 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.679444 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.747329 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="252195d0-8a55-4479-b2f6-3248f52fea78" path="/var/lib/kubelet/pods/252195d0-8a55-4479-b2f6-3248f52fea78/volumes"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.748064 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c8a027f-b0f8-47f4-9747-f727e6fb940c" path="/var/lib/kubelet/pods/2c8a027f-b0f8-47f4-9747-f727e6fb940c/volumes"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.758112 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c6d291-30d4-44d5-a1ec-877c30fc954f-logs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.758171 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gxz2\" (UniqueName: \"kubernetes.io/projected/a9c6d291-30d4-44d5-a1ec-877c30fc954f-kube-api-access-4gxz2\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.758207 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-public-tls-certs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.758230 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.758333 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.758394 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-config-data\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0"
Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.836957 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
pod="openstack/nova-metadata-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.837008 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.860428 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-config-data\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.860595 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c6d291-30d4-44d5-a1ec-877c30fc954f-logs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.860713 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gxz2\" (UniqueName: \"kubernetes.io/projected/a9c6d291-30d4-44d5-a1ec-877c30fc954f-kube-api-access-4gxz2\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.860751 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-public-tls-certs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.860772 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.860816 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.861160 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c6d291-30d4-44d5-a1ec-877c30fc954f-logs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.866455 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.867234 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-config-data\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.869273 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.876122 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c6d291-30d4-44d5-a1ec-877c30fc954f-public-tls-certs\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.880088 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gxz2\" (UniqueName: \"kubernetes.io/projected/a9c6d291-30d4-44d5-a1ec-877c30fc954f-kube-api-access-4gxz2\") pod \"nova-api-0\" (UID: \"a9c6d291-30d4-44d5-a1ec-877c30fc954f\") " pod="openstack/nova-api-0" Sep 29 13:03:15 crc kubenswrapper[4611]: I0929 13:03:15.932418 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 13:03:16 crc kubenswrapper[4611]: I0929 13:03:16.430043 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 13:03:16 crc kubenswrapper[4611]: W0929 13:03:16.431384 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9c6d291_30d4_44d5_a1ec_877c30fc954f.slice/crio-b6e715777df93845dc49f9202d704812d3ed8167b1698336f2b94d93e25f027a WatchSource:0}: Error finding container b6e715777df93845dc49f9202d704812d3ed8167b1698336f2b94d93e25f027a: Status 404 returned error can't find the container with id b6e715777df93845dc49f9202d704812d3ed8167b1698336f2b94d93e25f027a Sep 29 13:03:16 crc kubenswrapper[4611]: I0929 13:03:16.498024 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a9c6d291-30d4-44d5-a1ec-877c30fc954f","Type":"ContainerStarted","Data":"b6e715777df93845dc49f9202d704812d3ed8167b1698336f2b94d93e25f027a"} Sep 29 13:03:17 crc kubenswrapper[4611]: I0929 13:03:17.511065 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a9c6d291-30d4-44d5-a1ec-877c30fc954f","Type":"ContainerStarted","Data":"c1259bafa137903537ba2f45de0062b4a294bfe2bf7c9a07ed5377f26f52eb86"} Sep 29 13:03:17 crc kubenswrapper[4611]: I0929 13:03:17.513323 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a9c6d291-30d4-44d5-a1ec-877c30fc954f","Type":"ContainerStarted","Data":"3d61791f5683f082ebe5362ed94c9b9431acbccb453813ebec91c33defcd8594"} Sep 29 13:03:17 crc kubenswrapper[4611]: I0929 13:03:17.531708 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.531692787 podStartE2EDuration="2.531692787s" podCreationTimestamp="2025-09-29 13:03:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:03:17.528420353 +0000 UTC m=+1384.419939959" watchObservedRunningTime="2025-09-29 13:03:17.531692787 +0000 UTC m=+1384.423212393" Sep 29 13:03:19 crc kubenswrapper[4611]: I0929 13:03:19.167944 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 13:03:20 crc kubenswrapper[4611]: I0929 13:03:20.836888 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 13:03:20 crc 
Sep 29 13:03:21 crc kubenswrapper[4611]: I0929 13:03:21.850808 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="38cd3c38-4553-4c07-8627-615a255435d2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:03:21 crc kubenswrapper[4611]: I0929 13:03:21.850808 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="38cd3c38-4553-4c07-8627-615a255435d2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:03:24 crc kubenswrapper[4611]: I0929 13:03:24.167689 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 29 13:03:24 crc kubenswrapper[4611]: I0929 13:03:24.193221 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 29 13:03:24 crc kubenswrapper[4611]: I0929 13:03:24.600783 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 29 13:03:25 crc kubenswrapper[4611]: I0929 13:03:25.933336 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 13:03:25 crc kubenswrapper[4611]: I0929 13:03:25.933705 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 13:03:26 crc kubenswrapper[4611]: I0929 13:03:26.945907 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a9c6d291-30d4-44d5-a1ec-877c30fc954f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.213:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:03:26 crc kubenswrapper[4611]: I0929 13:03:26.946210 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a9c6d291-30d4-44d5-a1ec-877c30fc954f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.213:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:03:30 crc kubenswrapper[4611]: I0929 13:03:30.842910 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 29 13:03:30 crc kubenswrapper[4611]: I0929 13:03:30.843531 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 29 13:03:30 crc kubenswrapper[4611]: I0929 13:03:30.847768 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 29 13:03:30 crc kubenswrapper[4611]: I0929 13:03:30.852605 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 29 13:03:34 crc kubenswrapper[4611]: I0929 13:03:34.628970 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:03:34 crc kubenswrapper[4611]: I0929 13:03:34.629554 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:03:35 crc kubenswrapper[4611]: I0929 13:03:35.943283 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 13:03:35 crc kubenswrapper[4611]: I0929 13:03:35.944319 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 13:03:35 crc kubenswrapper[4611]: I0929 13:03:35.944652 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 13:03:35 crc kubenswrapper[4611]: I0929 13:03:35.960166 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 13:03:36 crc kubenswrapper[4611]: I0929 13:03:36.687674 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 13:03:36 crc kubenswrapper[4611]: I0929 13:03:36.697414 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 13:03:38 crc kubenswrapper[4611]: I0929 13:03:38.009718 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 29 13:03:48 crc kubenswrapper[4611]: I0929 13:03:48.771901 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 13:03:49 crc kubenswrapper[4611]: I0929 13:03:49.683899 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 13:03:52 crc kubenswrapper[4611]: I0929 13:03:52.484909 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerName="rabbitmq" containerID="cri-o://626c6b63a0bf75740e17a5a7ea410d7433099baf1d9cee485485a254e2cac77c" gracePeriod=604797
Sep 29 13:03:53 crc kubenswrapper[4611]: I0929 13:03:53.008158 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerName="rabbitmq" containerID="cri-o://cb1724d6ea5c29546933c2f22f5ec658deda7f84e8465c9001b046ec486d7a65" gracePeriod=604797
Sep 29 13:03:53 crc kubenswrapper[4611]: I0929 13:03:53.851063 4611 generic.go:334] "Generic (PLEG): container finished" podID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerID="626c6b63a0bf75740e17a5a7ea410d7433099baf1d9cee485485a254e2cac77c" exitCode=0
Sep 29 13:03:53 crc kubenswrapper[4611]: I0929 13:03:53.851272 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"eb4ad743-3387-43bc-b15d-e3d4b0825793","Type":"ContainerDied","Data":"626c6b63a0bf75740e17a5a7ea410d7433099baf1d9cee485485a254e2cac77c"}
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.109543 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209002 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llr25\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-kube-api-access-llr25\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209067 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-server-conf\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209094 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-erlang-cookie\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209174 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/eb4ad743-3387-43bc-b15d-e3d4b0825793-erlang-cookie-secret\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209269 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-tls\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209344 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-config-data\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209388 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/eb4ad743-3387-43bc-b15d-e3d4b0825793-pod-info\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209438 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209486 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-plugins-conf\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209522 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-confd\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.209569 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-plugins\") pod \"eb4ad743-3387-43bc-b15d-e3d4b0825793\" (UID: \"eb4ad743-3387-43bc-b15d-e3d4b0825793\") "
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.211811 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.212968 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.217707 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.226946 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.230262 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/eb4ad743-3387-43bc-b15d-e3d4b0825793-pod-info" (OuterVolumeSpecName: "pod-info") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.242957 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-kube-api-access-llr25" (OuterVolumeSpecName: "kube-api-access-llr25") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "kube-api-access-llr25". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.246911 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.285165 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb4ad743-3387-43bc-b15d-e3d4b0825793-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.295247 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-config-data" (OuterVolumeSpecName: "config-data") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315222 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llr25\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-kube-api-access-llr25\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315277 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315483 4611 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/eb4ad743-3387-43bc-b15d-e3d4b0825793-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315492 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315502 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315512 4611 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/eb4ad743-3387-43bc-b15d-e3d4b0825793-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315745 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315760 4611 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.315769 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.324286 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-server-conf" (OuterVolumeSpecName: "server-conf") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.371795 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.417915 4611 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/eb4ad743-3387-43bc-b15d-e3d4b0825793-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.417956 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.549921 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "eb4ad743-3387-43bc-b15d-e3d4b0825793" (UID: "eb4ad743-3387-43bc-b15d-e3d4b0825793"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.631965 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/eb4ad743-3387-43bc-b15d-e3d4b0825793-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.875584 4611 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.875791 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"eb4ad743-3387-43bc-b15d-e3d4b0825793","Type":"ContainerDied","Data":"b9fb1b646267ea31ecfc11c6b369e5c0a74405c367e4ecb2972818f0663580e3"}
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.875850 4611 scope.go:117] "RemoveContainer" containerID="626c6b63a0bf75740e17a5a7ea410d7433099baf1d9cee485485a254e2cac77c"
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.888702 4611 generic.go:334] "Generic (PLEG): container finished" podID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerID="cb1724d6ea5c29546933c2f22f5ec658deda7f84e8465c9001b046ec486d7a65" exitCode=0
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.888768 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a8501653-441a-4c5f-b098-bc5fb7aeba22","Type":"ContainerDied","Data":"cb1724d6ea5c29546933c2f22f5ec658deda7f84e8465c9001b046ec486d7a65"}
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.888793 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a8501653-441a-4c5f-b098-bc5fb7aeba22","Type":"ContainerDied","Data":"a812ce5b0bd22e35fda76bb127803a7de863c94a5d2ab34c362e0138bc12489c"}
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.888805 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a812ce5b0bd22e35fda76bb127803a7de863c94a5d2ab34c362e0138bc12489c"
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.958565 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:54 crc kubenswrapper[4611]: I0929 13:03:54.993733 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.008518 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.024790 4611 scope.go:117] "RemoveContainer" containerID="be1f39ea68722bb92f5a313b14311073c5463b6ee64113518c5a704781fb9c26"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043253 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a8501653-441a-4c5f-b098-bc5fb7aeba22-pod-info\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043288 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043366 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-config-data\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043454 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-server-conf\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043529 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-erlang-cookie\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043567 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-tls\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043598 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-confd\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043616 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-plugins-conf\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043664 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a8501653-441a-4c5f-b098-bc5fb7aeba22-erlang-cookie-secret\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043690 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zt98\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-kube-api-access-7zt98\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.043739 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-plugins\") pod \"a8501653-441a-4c5f-b098-bc5fb7aeba22\" (UID: \"a8501653-441a-4c5f-b098-bc5fb7aeba22\") "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.047107 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.049850 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.051737 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.081062 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a8501653-441a-4c5f-b098-bc5fb7aeba22-pod-info" (OuterVolumeSpecName: "pod-info") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.081121 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.082215 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8501653-441a-4c5f-b098-bc5fb7aeba22-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.091823 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.098342 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 13:03:55 crc kubenswrapper[4611]: E0929 13:03:55.098968 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerName="setup-container" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.098986 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerName="setup-container" Sep 29 13:03:55 crc kubenswrapper[4611]: E0929 13:03:55.098997 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerName="rabbitmq" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.099003 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerName="rabbitmq" Sep 29 13:03:55 crc kubenswrapper[4611]: E0929 13:03:55.099015 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerName="rabbitmq" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.099020 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerName="rabbitmq" Sep 29 13:03:55 crc kubenswrapper[4611]: E0929 13:03:55.099038 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerName="setup-container" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.099043 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerName="setup-container" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.099267 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" containerName="rabbitmq" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.099291 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" containerName="rabbitmq" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.100254 4611 util.go:30] "No sandbox for pod can be found. 
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.106999 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.107058 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.107410 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-xm2qj"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.107953 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.108244 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.108393 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.108791 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-kube-api-access-7zt98" (OuterVolumeSpecName: "kube-api-access-7zt98") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "kube-api-access-7zt98". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.112800 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147537 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147567 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147578 4611 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147588 4611 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a8501653-441a-4c5f-b098-bc5fb7aeba22-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147598 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zt98\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-kube-api-access-7zt98\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147607 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147617 4611 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a8501653-441a-4c5f-b098-bc5fb7aeba22-pod-info\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.147657 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" "
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.194706 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-config-data" (OuterVolumeSpecName: "config-data") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.206084 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.251471 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.252084 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.252116 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.252147 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-config-data\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.252195 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b39f6c6-fcef-4959-b3ca-2e18f587762e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.252334 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.254887 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ljrd\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-kube-api-access-8ljrd\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.255024 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b39f6c6-fcef-4959-b3ca-2e18f587762e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.255074 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.255107 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.255143 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.255329 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.284107 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.304350 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-server-conf" (OuterVolumeSpecName: "server-conf") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.356623 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.356679 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ljrd\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-kube-api-access-8ljrd\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.357920 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.360858 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b39f6c6-fcef-4959-b3ca-2e18f587762e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.360926 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.361145 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.361184 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.361315 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.361380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.361460 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.362267 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.362533 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.362607 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-config-data\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.362648 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b39f6c6-fcef-4959-b3ca-2e18f587762e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.362767 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.362781 4611 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a8501653-441a-4c5f-b098-bc5fb7aeba22-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.363486 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.363680 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.365708 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3b39f6c6-fcef-4959-b3ca-2e18f587762e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.365953 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b39f6c6-fcef-4959-b3ca-2e18f587762e-config-data\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " 
pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.370304 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.370427 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.373322 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3b39f6c6-fcef-4959-b3ca-2e18f587762e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.378784 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ljrd\" (UniqueName: \"kubernetes.io/projected/3b39f6c6-fcef-4959-b3ca-2e18f587762e-kube-api-access-8ljrd\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.423698 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"3b39f6c6-fcef-4959-b3ca-2e18f587762e\") " pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.424951 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a8501653-441a-4c5f-b098-bc5fb7aeba22" (UID: "a8501653-441a-4c5f-b098-bc5fb7aeba22"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.464397 4611 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a8501653-441a-4c5f-b098-bc5fb7aeba22-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.566923 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.754075 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb4ad743-3387-43bc-b15d-e3d4b0825793" path="/var/lib/kubelet/pods/eb4ad743-3387-43bc-b15d-e3d4b0825793/volumes" Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.899137 4611 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.928174 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.957411 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.965943 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.967847 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.973570 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.973924 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.974057 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.974169 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.974275 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7v52b"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.974390 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.974499 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 29 13:03:55 crc kubenswrapper[4611]: I0929 13:03:55.975896 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074714 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074782 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074822 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074856 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074879 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074901 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkgck\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-kube-api-access-bkgck\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.074928 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.075001 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.075032 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.075096 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.075139 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.085315 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176663 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176729 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176810 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176860 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176911 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176954 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.176999 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.177039 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.177064 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.177087 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkgck\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-kube-api-access-bkgck\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.177113 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0"
\"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.178385 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.178714 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.179058 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.179832 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.181712 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.181961 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.185193 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.185807 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.188766 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.189539 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.203706 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkgck\" (UniqueName: \"kubernetes.io/projected/8646b629-62bf-4405-b9ec-e2bcbceeb8bb-kube-api-access-bkgck\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.236580 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8646b629-62bf-4405-b9ec-e2bcbceeb8bb\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.294100 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.768817 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 13:03:56 crc kubenswrapper[4611]: W0929 13:03:56.776872 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8646b629_62bf_4405_b9ec_e2bcbceeb8bb.slice/crio-66b52856c72f86bc5d5272cb0d6111261083312cdc16217da33ba7d07f6bfa51 WatchSource:0}: Error finding container 66b52856c72f86bc5d5272cb0d6111261083312cdc16217da33ba7d07f6bfa51: Status 404 returned error can't find the container with id 66b52856c72f86bc5d5272cb0d6111261083312cdc16217da33ba7d07f6bfa51 Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.917473 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3b39f6c6-fcef-4959-b3ca-2e18f587762e","Type":"ContainerStarted","Data":"fac6461ee94dfe250a790421b9f910da985b88059d26e36bf6b565356b1c3de5"} Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.917527 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3b39f6c6-fcef-4959-b3ca-2e18f587762e","Type":"ContainerStarted","Data":"96f89ea3409434e61bafdbcb0134d1d52ae74b56c5ca688df20593bab9de59b8"} Sep 29 13:03:56 crc kubenswrapper[4611]: I0929 13:03:56.921276 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8646b629-62bf-4405-b9ec-e2bcbceeb8bb","Type":"ContainerStarted","Data":"66b52856c72f86bc5d5272cb0d6111261083312cdc16217da33ba7d07f6bfa51"} Sep 29 13:03:57 crc kubenswrapper[4611]: I0929 13:03:57.751828 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8501653-441a-4c5f-b098-bc5fb7aeba22" path="/var/lib/kubelet/pods/a8501653-441a-4c5f-b098-bc5fb7aeba22/volumes" Sep 29 13:03:57 crc kubenswrapper[4611]: I0929 13:03:57.959249 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8646b629-62bf-4405-b9ec-e2bcbceeb8bb","Type":"ContainerStarted","Data":"cdac4aaaf2ee931f6bcaf253f53f33d65dd9c76bf8d2ed17fbe4979e5578b0a0"} Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 
13:04:01.598590 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kx8fc"] Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.601061 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.623041 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kx8fc"] Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.683859 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdhzt\" (UniqueName: \"kubernetes.io/projected/e1b74564-1f7e-4004-8f70-f80aaad116ed-kube-api-access-bdhzt\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.684223 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-utilities\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.684258 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-catalog-content\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.786154 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdhzt\" (UniqueName: \"kubernetes.io/projected/e1b74564-1f7e-4004-8f70-f80aaad116ed-kube-api-access-bdhzt\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.786261 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-utilities\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.786294 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-catalog-content\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.787030 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-catalog-content\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.787585 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-utilities\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.808011 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdhzt\" (UniqueName: \"kubernetes.io/projected/e1b74564-1f7e-4004-8f70-f80aaad116ed-kube-api-access-bdhzt\") pod \"community-operators-kx8fc\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:01 crc kubenswrapper[4611]: I0929 13:04:01.924071 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:02 crc kubenswrapper[4611]: I0929 13:04:02.387070 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kx8fc"] Sep 29 13:04:03 crc kubenswrapper[4611]: I0929 13:04:03.011508 4611 generic.go:334] "Generic (PLEG): container finished" podID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerID="6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde" exitCode=0 Sep 29 13:04:03 crc kubenswrapper[4611]: I0929 13:04:03.011579 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kx8fc" event={"ID":"e1b74564-1f7e-4004-8f70-f80aaad116ed","Type":"ContainerDied","Data":"6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde"} Sep 29 13:04:03 crc kubenswrapper[4611]: I0929 13:04:03.013200 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kx8fc" event={"ID":"e1b74564-1f7e-4004-8f70-f80aaad116ed","Type":"ContainerStarted","Data":"412edf110b000e71fe34fce9d10fa0543173f4d4bdee83bfa58061ca194ac4c3"} Sep 29 13:04:04 crc kubenswrapper[4611]: I0929 13:04:04.631455 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:04:04 crc kubenswrapper[4611]: I0929 13:04:04.631878 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:04:04 crc kubenswrapper[4611]: I0929 13:04:04.631931 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:04:04 crc kubenswrapper[4611]: I0929 13:04:04.632698 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7942f9fe9e1c29e9bc5facddd920c692e815332925345c3fdce8ee2caca74f90"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:04:04 crc kubenswrapper[4611]: I0929 13:04:04.632772 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" 
containerName="machine-config-daemon" containerID="cri-o://7942f9fe9e1c29e9bc5facddd920c692e815332925345c3fdce8ee2caca74f90" gracePeriod=600 Sep 29 13:04:05 crc kubenswrapper[4611]: I0929 13:04:05.039119 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="7942f9fe9e1c29e9bc5facddd920c692e815332925345c3fdce8ee2caca74f90" exitCode=0 Sep 29 13:04:05 crc kubenswrapper[4611]: I0929 13:04:05.039461 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"7942f9fe9e1c29e9bc5facddd920c692e815332925345c3fdce8ee2caca74f90"} Sep 29 13:04:05 crc kubenswrapper[4611]: I0929 13:04:05.039531 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"} Sep 29 13:04:05 crc kubenswrapper[4611]: I0929 13:04:05.039547 4611 scope.go:117] "RemoveContainer" containerID="adbfe6821ab82328582fac5fae1e1a588692912e18a6cfaa37c8967ac7e74a78" Sep 29 13:04:05 crc kubenswrapper[4611]: I0929 13:04:05.042402 4611 generic.go:334] "Generic (PLEG): container finished" podID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerID="20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee" exitCode=0 Sep 29 13:04:05 crc kubenswrapper[4611]: I0929 13:04:05.042431 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kx8fc" event={"ID":"e1b74564-1f7e-4004-8f70-f80aaad116ed","Type":"ContainerDied","Data":"20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee"} Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.056225 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kx8fc" event={"ID":"e1b74564-1f7e-4004-8f70-f80aaad116ed","Type":"ContainerStarted","Data":"ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7"} Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.086518 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kx8fc" podStartSLOduration=2.568815002 podStartE2EDuration="5.086500033s" podCreationTimestamp="2025-09-29 13:04:01 +0000 UTC" firstStartedPulling="2025-09-29 13:04:03.014577558 +0000 UTC m=+1429.906097164" lastFinishedPulling="2025-09-29 13:04:05.532262589 +0000 UTC m=+1432.423782195" observedRunningTime="2025-09-29 13:04:06.077601935 +0000 UTC m=+1432.969121551" watchObservedRunningTime="2025-09-29 13:04:06.086500033 +0000 UTC m=+1432.978019639" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.749053 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c6cf6b69-xdjm8"] Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.750856 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.753106 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.772008 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c6cf6b69-xdjm8"] Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.898869 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.898920 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-config\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.899088 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.899276 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-swift-storage-0\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.899347 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-svc\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.899687 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5chr\" (UniqueName: \"kubernetes.io/projected/248154a6-39e5-4663-a59c-4d85132d4e7b-kube-api-access-v5chr\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:06 crc kubenswrapper[4611]: I0929 13:04:06.899721 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.007512 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5chr\" (UniqueName: \"kubernetes.io/projected/248154a6-39e5-4663-a59c-4d85132d4e7b-kube-api-access-v5chr\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" 
(UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.007817 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.007954 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.008049 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-config\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.008190 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.008362 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-swift-storage-0\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.008547 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-svc\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.008926 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.009058 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.009258 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-config\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc 
kubenswrapper[4611]: I0929 13:04:07.009258 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.009268 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-swift-storage-0\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.009473 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-svc\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.041361 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5chr\" (UniqueName: \"kubernetes.io/projected/248154a6-39e5-4663-a59c-4d85132d4e7b-kube-api-access-v5chr\") pod \"dnsmasq-dns-5c6cf6b69-xdjm8\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.072659 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:07 crc kubenswrapper[4611]: I0929 13:04:07.614063 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c6cf6b69-xdjm8"] Sep 29 13:04:08 crc kubenswrapper[4611]: I0929 13:04:08.107764 4611 generic.go:334] "Generic (PLEG): container finished" podID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerID="4fed542c9764facf98667597b5c294f1861530d13cc12d72c853a618e3fecd0c" exitCode=0 Sep 29 13:04:08 crc kubenswrapper[4611]: I0929 13:04:08.107825 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" event={"ID":"248154a6-39e5-4663-a59c-4d85132d4e7b","Type":"ContainerDied","Data":"4fed542c9764facf98667597b5c294f1861530d13cc12d72c853a618e3fecd0c"} Sep 29 13:04:08 crc kubenswrapper[4611]: I0929 13:04:08.108187 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" event={"ID":"248154a6-39e5-4663-a59c-4d85132d4e7b","Type":"ContainerStarted","Data":"2b50070fc3b7d40c5cdaae3631f08b319fc8f15358cf69138eaec7c448b65dad"} Sep 29 13:04:09 crc kubenswrapper[4611]: I0929 13:04:09.119211 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" event={"ID":"248154a6-39e5-4663-a59c-4d85132d4e7b","Type":"ContainerStarted","Data":"1ecd772c05171aacec2579f4c394b0e436668d00cb2aa1d83275ecf962ce9d11"} Sep 29 13:04:09 crc kubenswrapper[4611]: I0929 13:04:09.119568 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:09 crc kubenswrapper[4611]: I0929 13:04:09.142127 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" podStartSLOduration=3.142102385 podStartE2EDuration="3.142102385s" podCreationTimestamp="2025-09-29 13:04:06 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:04:09.140160358 +0000 UTC m=+1436.031679964" watchObservedRunningTime="2025-09-29 13:04:09.142102385 +0000 UTC m=+1436.033622001" Sep 29 13:04:11 crc kubenswrapper[4611]: I0929 13:04:11.924907 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:11 crc kubenswrapper[4611]: I0929 13:04:11.925242 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:12 crc kubenswrapper[4611]: I0929 13:04:12.968788 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-kx8fc" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="registry-server" probeResult="failure" output=< Sep 29 13:04:12 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:04:12 crc kubenswrapper[4611]: > Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.074126 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.171708 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d6dc8bf89-lpj5s"] Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.171985 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerName="dnsmasq-dns" containerID="cri-o://b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5" gracePeriod=10 Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.382648 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c7c498869-wtqdv"] Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.392648 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.402652 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c7c498869-wtqdv"] Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540080 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-dns-svc\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540194 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-config\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540263 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-ovsdbserver-nb\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540306 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf7c4\" (UniqueName: \"kubernetes.io/projected/c4579287-c56b-417d-b05f-ee78f9aea474-kube-api-access-lf7c4\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540332 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-dns-swift-storage-0\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540404 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.540430 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-openstack-edpm-ipam\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646291 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-ovsdbserver-nb\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646364 4611 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf7c4\" (UniqueName: \"kubernetes.io/projected/c4579287-c56b-417d-b05f-ee78f9aea474-kube-api-access-lf7c4\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646389 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-dns-swift-storage-0\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646466 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646495 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-openstack-edpm-ipam\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646564 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-dns-svc\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.646613 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-config\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.647587 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-ovsdbserver-nb\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.648439 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-dns-swift-storage-0\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.649223 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.649677 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-openstack-edpm-ipam\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.649972 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-config\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.650077 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4579287-c56b-417d-b05f-ee78f9aea474-dns-svc\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.705933 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf7c4\" (UniqueName: \"kubernetes.io/projected/c4579287-c56b-417d-b05f-ee78f9aea474-kube-api-access-lf7c4\") pod \"dnsmasq-dns-6c7c498869-wtqdv\" (UID: \"c4579287-c56b-417d-b05f-ee78f9aea474\") " pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.740992 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.791778 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.957934 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-nb\") pod \"fc465e99-037f-4fc4-acca-31b22fd061b5\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.957988 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcm5z\" (UniqueName: \"kubernetes.io/projected/fc465e99-037f-4fc4-acca-31b22fd061b5-kube-api-access-tcm5z\") pod \"fc465e99-037f-4fc4-acca-31b22fd061b5\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.958044 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-svc\") pod \"fc465e99-037f-4fc4-acca-31b22fd061b5\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.958077 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-config\") pod \"fc465e99-037f-4fc4-acca-31b22fd061b5\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.958153 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-swift-storage-0\") pod \"fc465e99-037f-4fc4-acca-31b22fd061b5\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " Sep 29 13:04:17 crc 
kubenswrapper[4611]: I0929 13:04:17.958247 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-sb\") pod \"fc465e99-037f-4fc4-acca-31b22fd061b5\" (UID: \"fc465e99-037f-4fc4-acca-31b22fd061b5\") " Sep 29 13:04:17 crc kubenswrapper[4611]: I0929 13:04:17.967528 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc465e99-037f-4fc4-acca-31b22fd061b5-kube-api-access-tcm5z" (OuterVolumeSpecName: "kube-api-access-tcm5z") pod "fc465e99-037f-4fc4-acca-31b22fd061b5" (UID: "fc465e99-037f-4fc4-acca-31b22fd061b5"). InnerVolumeSpecName "kube-api-access-tcm5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.023211 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-config" (OuterVolumeSpecName: "config") pod "fc465e99-037f-4fc4-acca-31b22fd061b5" (UID: "fc465e99-037f-4fc4-acca-31b22fd061b5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.031439 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fc465e99-037f-4fc4-acca-31b22fd061b5" (UID: "fc465e99-037f-4fc4-acca-31b22fd061b5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.042490 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fc465e99-037f-4fc4-acca-31b22fd061b5" (UID: "fc465e99-037f-4fc4-acca-31b22fd061b5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.060401 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.060482 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcm5z\" (UniqueName: \"kubernetes.io/projected/fc465e99-037f-4fc4-acca-31b22fd061b5-kube-api-access-tcm5z\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.060498 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.060506 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.076752 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fc465e99-037f-4fc4-acca-31b22fd061b5" (UID: "fc465e99-037f-4fc4-acca-31b22fd061b5"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.107267 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fc465e99-037f-4fc4-acca-31b22fd061b5" (UID: "fc465e99-037f-4fc4-acca-31b22fd061b5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.162836 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.162871 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc465e99-037f-4fc4-acca-31b22fd061b5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.223801 4611 generic.go:334] "Generic (PLEG): container finished" podID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerID="b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5" exitCode=0 Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.223845 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" event={"ID":"fc465e99-037f-4fc4-acca-31b22fd061b5","Type":"ContainerDied","Data":"b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5"} Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.223870 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" event={"ID":"fc465e99-037f-4fc4-acca-31b22fd061b5","Type":"ContainerDied","Data":"88c299dd417dd5f4e031d454c515e02b1e8bfa7526473f144177a171e9b5aa5e"} Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.223886 4611 scope.go:117] "RemoveContainer" containerID="b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.224010 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d6dc8bf89-lpj5s" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.249997 4611 scope.go:117] "RemoveContainer" containerID="09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.262691 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d6dc8bf89-lpj5s"] Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.270970 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d6dc8bf89-lpj5s"] Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.279484 4611 scope.go:117] "RemoveContainer" containerID="b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5" Sep 29 13:04:18 crc kubenswrapper[4611]: E0929 13:04:18.280116 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5\": container with ID starting with b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5 not found: ID does not exist" containerID="b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.280210 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5"} err="failed to get container status \"b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5\": rpc error: code = NotFound desc = could not find container \"b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5\": container with ID starting with b6c5fe6f512fbc8c9af0acd9e12f742521bde214dabfadf3ca441342ee03ade5 not found: ID does not exist" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.280244 4611 scope.go:117] "RemoveContainer" containerID="09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225" Sep 29 13:04:18 crc kubenswrapper[4611]: E0929 13:04:18.280735 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225\": container with ID starting with 09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225 not found: ID does not exist" containerID="09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.280776 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225"} err="failed to get container status \"09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225\": rpc error: code = NotFound desc = could not find container \"09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225\": container with ID starting with 09264b92f66d54433ec0eb39dceb95f42683f96f206679cb40573f2dc1fbf225 not found: ID does not exist" Sep 29 13:04:18 crc kubenswrapper[4611]: I0929 13:04:18.361509 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c7c498869-wtqdv"] Sep 29 13:04:18 crc kubenswrapper[4611]: W0929 13:04:18.365833 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4579287_c56b_417d_b05f_ee78f9aea474.slice/crio-d7e925e98c7dce3984a2a15aec40f7d99f68efc6a4c2e9c1622273044da2c1d9 WatchSource:0}: Error finding 
container d7e925e98c7dce3984a2a15aec40f7d99f68efc6a4c2e9c1622273044da2c1d9: Status 404 returned error can't find the container with id d7e925e98c7dce3984a2a15aec40f7d99f68efc6a4c2e9c1622273044da2c1d9 Sep 29 13:04:19 crc kubenswrapper[4611]: I0929 13:04:19.237026 4611 generic.go:334] "Generic (PLEG): container finished" podID="c4579287-c56b-417d-b05f-ee78f9aea474" containerID="1cbd95c113c4223e1c1477b327ad16e3ce3af422deaa7a6805a545ef5f14212c" exitCode=0 Sep 29 13:04:19 crc kubenswrapper[4611]: I0929 13:04:19.237107 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" event={"ID":"c4579287-c56b-417d-b05f-ee78f9aea474","Type":"ContainerDied","Data":"1cbd95c113c4223e1c1477b327ad16e3ce3af422deaa7a6805a545ef5f14212c"} Sep 29 13:04:19 crc kubenswrapper[4611]: I0929 13:04:19.237409 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" event={"ID":"c4579287-c56b-417d-b05f-ee78f9aea474","Type":"ContainerStarted","Data":"d7e925e98c7dce3984a2a15aec40f7d99f68efc6a4c2e9c1622273044da2c1d9"} Sep 29 13:04:19 crc kubenswrapper[4611]: I0929 13:04:19.747305 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" path="/var/lib/kubelet/pods/fc465e99-037f-4fc4-acca-31b22fd061b5/volumes" Sep 29 13:04:20 crc kubenswrapper[4611]: I0929 13:04:20.250116 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" event={"ID":"c4579287-c56b-417d-b05f-ee78f9aea474","Type":"ContainerStarted","Data":"944f6e9692fe17538015e4bb2d8c7a66933b6ecd8d86339862fc2f046757cac4"} Sep 29 13:04:20 crc kubenswrapper[4611]: I0929 13:04:20.250751 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:20 crc kubenswrapper[4611]: I0929 13:04:20.276013 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" podStartSLOduration=3.275987164 podStartE2EDuration="3.275987164s" podCreationTimestamp="2025-09-29 13:04:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:04:20.269812836 +0000 UTC m=+1447.161332442" watchObservedRunningTime="2025-09-29 13:04:20.275987164 +0000 UTC m=+1447.167506770" Sep 29 13:04:21 crc kubenswrapper[4611]: I0929 13:04:21.978875 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:22 crc kubenswrapper[4611]: I0929 13:04:22.033728 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:22 crc kubenswrapper[4611]: I0929 13:04:22.217565 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kx8fc"] Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.274600 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kx8fc" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="registry-server" containerID="cri-o://ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7" gracePeriod=2 Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.743377 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.868929 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-catalog-content\") pod \"e1b74564-1f7e-4004-8f70-f80aaad116ed\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.869126 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdhzt\" (UniqueName: \"kubernetes.io/projected/e1b74564-1f7e-4004-8f70-f80aaad116ed-kube-api-access-bdhzt\") pod \"e1b74564-1f7e-4004-8f70-f80aaad116ed\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.869250 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-utilities\") pod \"e1b74564-1f7e-4004-8f70-f80aaad116ed\" (UID: \"e1b74564-1f7e-4004-8f70-f80aaad116ed\") " Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.869875 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-utilities" (OuterVolumeSpecName: "utilities") pod "e1b74564-1f7e-4004-8f70-f80aaad116ed" (UID: "e1b74564-1f7e-4004-8f70-f80aaad116ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.872295 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.874146 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1b74564-1f7e-4004-8f70-f80aaad116ed-kube-api-access-bdhzt" (OuterVolumeSpecName: "kube-api-access-bdhzt") pod "e1b74564-1f7e-4004-8f70-f80aaad116ed" (UID: "e1b74564-1f7e-4004-8f70-f80aaad116ed"). InnerVolumeSpecName "kube-api-access-bdhzt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.924208 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1b74564-1f7e-4004-8f70-f80aaad116ed" (UID: "e1b74564-1f7e-4004-8f70-f80aaad116ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.974154 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdhzt\" (UniqueName: \"kubernetes.io/projected/e1b74564-1f7e-4004-8f70-f80aaad116ed-kube-api-access-bdhzt\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:23 crc kubenswrapper[4611]: I0929 13:04:23.974470 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1b74564-1f7e-4004-8f70-f80aaad116ed-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.284841 4611 generic.go:334] "Generic (PLEG): container finished" podID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerID="ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7" exitCode=0 Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.284891 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kx8fc" event={"ID":"e1b74564-1f7e-4004-8f70-f80aaad116ed","Type":"ContainerDied","Data":"ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7"} Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.284921 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kx8fc" event={"ID":"e1b74564-1f7e-4004-8f70-f80aaad116ed","Type":"ContainerDied","Data":"412edf110b000e71fe34fce9d10fa0543173f4d4bdee83bfa58061ca194ac4c3"} Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.284939 4611 scope.go:117] "RemoveContainer" containerID="ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.285870 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kx8fc" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.312177 4611 scope.go:117] "RemoveContainer" containerID="20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.318206 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kx8fc"] Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.327185 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kx8fc"] Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.334477 4611 scope.go:117] "RemoveContainer" containerID="6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.384108 4611 scope.go:117] "RemoveContainer" containerID="ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7" Sep 29 13:04:24 crc kubenswrapper[4611]: E0929 13:04:24.384823 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7\": container with ID starting with ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7 not found: ID does not exist" containerID="ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.384901 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7"} err="failed to get container status \"ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7\": rpc error: code = NotFound desc = could not find container \"ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7\": container with ID starting with ca466dd67e6ecbee8f4d324f327d31def0cd584cb347dd4e901f6e218ae396f7 not found: ID does not exist" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.384969 4611 scope.go:117] "RemoveContainer" containerID="20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee" Sep 29 13:04:24 crc kubenswrapper[4611]: E0929 13:04:24.385858 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee\": container with ID starting with 20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee not found: ID does not exist" containerID="20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.385953 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee"} err="failed to get container status \"20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee\": rpc error: code = NotFound desc = could not find container \"20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee\": container with ID starting with 20d037cec8dce71588a0cf3615fd84950a9a045f7db1cedacd0b652af46fb0ee not found: ID does not exist" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.386035 4611 scope.go:117] "RemoveContainer" containerID="6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde" Sep 29 13:04:24 crc kubenswrapper[4611]: E0929 13:04:24.386749 4611 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde\": container with ID starting with 6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde not found: ID does not exist" containerID="6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde" Sep 29 13:04:24 crc kubenswrapper[4611]: I0929 13:04:24.386788 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde"} err="failed to get container status \"6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde\": rpc error: code = NotFound desc = could not find container \"6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde\": container with ID starting with 6410f3b71ed421352037f92eada107a46350384a11e10b0c0788227b04130dde not found: ID does not exist" Sep 29 13:04:25 crc kubenswrapper[4611]: I0929 13:04:25.748038 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" path="/var/lib/kubelet/pods/e1b74564-1f7e-4004-8f70-f80aaad116ed/volumes" Sep 29 13:04:27 crc kubenswrapper[4611]: I0929 13:04:27.317279 4611 generic.go:334] "Generic (PLEG): container finished" podID="3b39f6c6-fcef-4959-b3ca-2e18f587762e" containerID="fac6461ee94dfe250a790421b9f910da985b88059d26e36bf6b565356b1c3de5" exitCode=0 Sep 29 13:04:27 crc kubenswrapper[4611]: I0929 13:04:27.317375 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3b39f6c6-fcef-4959-b3ca-2e18f587762e","Type":"ContainerDied","Data":"fac6461ee94dfe250a790421b9f910da985b88059d26e36bf6b565356b1c3de5"} Sep 29 13:04:27 crc kubenswrapper[4611]: I0929 13:04:27.761930 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c7c498869-wtqdv" Sep 29 13:04:27 crc kubenswrapper[4611]: I0929 13:04:27.829529 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c6cf6b69-xdjm8"] Sep 29 13:04:27 crc kubenswrapper[4611]: I0929 13:04:27.829859 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerName="dnsmasq-dns" containerID="cri-o://1ecd772c05171aacec2579f4c394b0e436668d00cb2aa1d83275ecf962ce9d11" gracePeriod=10 Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.329775 4611 generic.go:334] "Generic (PLEG): container finished" podID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerID="1ecd772c05171aacec2579f4c394b0e436668d00cb2aa1d83275ecf962ce9d11" exitCode=0 Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.329893 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" event={"ID":"248154a6-39e5-4663-a59c-4d85132d4e7b","Type":"ContainerDied","Data":"1ecd772c05171aacec2579f4c394b0e436668d00cb2aa1d83275ecf962ce9d11"} Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.330309 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" event={"ID":"248154a6-39e5-4663-a59c-4d85132d4e7b","Type":"ContainerDied","Data":"2b50070fc3b7d40c5cdaae3631f08b319fc8f15358cf69138eaec7c448b65dad"} Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.330330 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b50070fc3b7d40c5cdaae3631f08b319fc8f15358cf69138eaec7c448b65dad" Sep 29 
13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.332541 4611 generic.go:334] "Generic (PLEG): container finished" podID="8646b629-62bf-4405-b9ec-e2bcbceeb8bb" containerID="cdac4aaaf2ee931f6bcaf253f53f33d65dd9c76bf8d2ed17fbe4979e5578b0a0" exitCode=0 Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.332599 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8646b629-62bf-4405-b9ec-e2bcbceeb8bb","Type":"ContainerDied","Data":"cdac4aaaf2ee931f6bcaf253f53f33d65dd9c76bf8d2ed17fbe4979e5578b0a0"} Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.336158 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3b39f6c6-fcef-4959-b3ca-2e18f587762e","Type":"ContainerStarted","Data":"d8353e05b0d8c99a50f05707782cc2ce78f0f85040bf21c346ab98d2ba681946"} Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.336972 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.438780 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.438763566 podStartE2EDuration="33.438763566s" podCreationTimestamp="2025-09-29 13:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:04:28.436992035 +0000 UTC m=+1455.328511651" watchObservedRunningTime="2025-09-29 13:04:28.438763566 +0000 UTC m=+1455.330283172" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.593324 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.701210 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-swift-storage-0\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.701298 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-nb\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.701349 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-sb\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.701406 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-openstack-edpm-ipam\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.701460 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-config\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 
29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.701540 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5chr\" (UniqueName: \"kubernetes.io/projected/248154a6-39e5-4663-a59c-4d85132d4e7b-kube-api-access-v5chr\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.702023 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-svc\") pod \"248154a6-39e5-4663-a59c-4d85132d4e7b\" (UID: \"248154a6-39e5-4663-a59c-4d85132d4e7b\") " Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.711916 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/248154a6-39e5-4663-a59c-4d85132d4e7b-kube-api-access-v5chr" (OuterVolumeSpecName: "kube-api-access-v5chr") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "kube-api-access-v5chr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.799574 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.805543 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5chr\" (UniqueName: \"kubernetes.io/projected/248154a6-39e5-4663-a59c-4d85132d4e7b-kube-api-access-v5chr\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.805591 4611 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.812198 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.826780 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.830293 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.830363 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-config" (OuterVolumeSpecName: "config") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.832015 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "248154a6-39e5-4663-a59c-4d85132d4e7b" (UID: "248154a6-39e5-4663-a59c-4d85132d4e7b"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.911463 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.911498 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.911516 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.911527 4611 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:28 crc kubenswrapper[4611]: I0929 13:04:28.911537 4611 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/248154a6-39e5-4663-a59c-4d85132d4e7b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 13:04:29 crc kubenswrapper[4611]: I0929 13:04:29.349686 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8646b629-62bf-4405-b9ec-e2bcbceeb8bb","Type":"ContainerStarted","Data":"cb94e92703a207589d5d18cdf482415106cd73a27eb36c4597bdd259a71409cc"} Sep 29 13:04:29 crc kubenswrapper[4611]: I0929 13:04:29.349789 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c6cf6b69-xdjm8" Sep 29 13:04:29 crc kubenswrapper[4611]: I0929 13:04:29.381511 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.381491637 podStartE2EDuration="34.381491637s" podCreationTimestamp="2025-09-29 13:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:04:29.372221669 +0000 UTC m=+1456.263741295" watchObservedRunningTime="2025-09-29 13:04:29.381491637 +0000 UTC m=+1456.273011243" Sep 29 13:04:29 crc kubenswrapper[4611]: I0929 13:04:29.412608 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c6cf6b69-xdjm8"] Sep 29 13:04:29 crc kubenswrapper[4611]: I0929 13:04:29.424010 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c6cf6b69-xdjm8"] Sep 29 13:04:29 crc kubenswrapper[4611]: I0929 13:04:29.747997 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" path="/var/lib/kubelet/pods/248154a6-39e5-4663-a59c-4d85132d4e7b/volumes" Sep 29 13:04:33 crc kubenswrapper[4611]: I0929 13:04:33.360118 4611 scope.go:117] "RemoveContainer" containerID="4aff2a9484c42afaad021cd6f49cd771106e959a435c58a56b2aa05d93ca305c" Sep 29 13:04:33 crc kubenswrapper[4611]: I0929 13:04:33.387916 4611 scope.go:117] "RemoveContainer" containerID="3dfee8d7bb12af3564c72d9234b8e24eae27c1ab6e23a547e02cb38b0272c1c3" Sep 29 13:04:33 crc kubenswrapper[4611]: I0929 13:04:33.464144 4611 scope.go:117] "RemoveContainer" containerID="45beec9f037b8f8d5137a9276aea854ea3ce4bc7e5b0a0be47e058d6a1f01757" Sep 29 13:04:33 crc kubenswrapper[4611]: I0929 13:04:33.501524 4611 scope.go:117] "RemoveContainer" containerID="37c546f89bec485841fd676ff8aebfc4ea1bff2eb84df400fd44497f90f7c177" Sep 29 13:04:33 crc kubenswrapper[4611]: I0929 13:04:33.529294 4611 scope.go:117] "RemoveContainer" containerID="f30b7e41d633212e1aaf6d47c74ed2472a82c0f09854d45b4fba620b6bfed093" Sep 29 13:04:33 crc kubenswrapper[4611]: I0929 13:04:33.571053 4611 scope.go:117] "RemoveContainer" containerID="cb1724d6ea5c29546933c2f22f5ec658deda7f84e8465c9001b046ec486d7a65" Sep 29 13:04:36 crc kubenswrapper[4611]: I0929 13:04:36.294558 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:04:45 crc kubenswrapper[4611]: I0929 13:04:45.569883 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.297129 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.367990 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w"] Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368351 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerName="init" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368366 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerName="init" Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368380 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" 
containerName="registry-server" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368388 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="registry-server" Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368403 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="extract-content" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368410 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="extract-content" Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368448 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerName="dnsmasq-dns" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368456 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerName="dnsmasq-dns" Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368471 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerName="dnsmasq-dns" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368477 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerName="dnsmasq-dns" Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368487 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="extract-utilities" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368493 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="extract-utilities" Sep 29 13:04:46 crc kubenswrapper[4611]: E0929 13:04:46.368509 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerName="init" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368515 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerName="init" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368690 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc465e99-037f-4fc4-acca-31b22fd061b5" containerName="dnsmasq-dns" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368716 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="248154a6-39e5-4663-a59c-4d85132d4e7b" containerName="dnsmasq-dns" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.368730 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1b74564-1f7e-4004-8f70-f80aaad116ed" containerName="registry-server" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.369358 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.381902 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.382140 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.382280 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.382484 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.401723 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w"] Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.532105 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.532459 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.532600 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd7sk\" (UniqueName: \"kubernetes.io/projected/642768bf-2945-467e-bed5-c02808905701-kube-api-access-rd7sk\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.532763 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.634196 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.634309 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.634344 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd7sk\" (UniqueName: \"kubernetes.io/projected/642768bf-2945-467e-bed5-c02808905701-kube-api-access-rd7sk\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.634370 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.640564 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.641211 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.649410 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.657760 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd7sk\" (UniqueName: \"kubernetes.io/projected/642768bf-2945-467e-bed5-c02808905701-kube-api-access-rd7sk\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-2878w\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:46 crc kubenswrapper[4611]: I0929 13:04:46.691785 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:04:47 crc kubenswrapper[4611]: I0929 13:04:47.435390 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w"] Sep 29 13:04:47 crc kubenswrapper[4611]: W0929 13:04:47.437925 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod642768bf_2945_467e_bed5_c02808905701.slice/crio-2700c26671cb17b79bfdef9d0ad85e8f47fcef3e581e5135f1dd9f25122e9c28 WatchSource:0}: Error finding container 2700c26671cb17b79bfdef9d0ad85e8f47fcef3e581e5135f1dd9f25122e9c28: Status 404 returned error can't find the container with id 2700c26671cb17b79bfdef9d0ad85e8f47fcef3e581e5135f1dd9f25122e9c28 Sep 29 13:04:47 crc kubenswrapper[4611]: I0929 13:04:47.517958 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" event={"ID":"642768bf-2945-467e-bed5-c02808905701","Type":"ContainerStarted","Data":"2700c26671cb17b79bfdef9d0ad85e8f47fcef3e581e5135f1dd9f25122e9c28"} Sep 29 13:04:57 crc kubenswrapper[4611]: I0929 13:04:57.626220 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" event={"ID":"642768bf-2945-467e-bed5-c02808905701","Type":"ContainerStarted","Data":"06bbd0ca22fdc0ad77a701cec25bd03d03a790704df70493507d39f8b64ddfc4"} Sep 29 13:04:57 crc kubenswrapper[4611]: I0929 13:04:57.650194 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" podStartSLOduration=1.747846686 podStartE2EDuration="11.650166789s" podCreationTimestamp="2025-09-29 13:04:46 +0000 UTC" firstStartedPulling="2025-09-29 13:04:47.443038449 +0000 UTC m=+1474.334558055" lastFinishedPulling="2025-09-29 13:04:57.345358552 +0000 UTC m=+1484.236878158" observedRunningTime="2025-09-29 13:04:57.643486356 +0000 UTC m=+1484.535005962" watchObservedRunningTime="2025-09-29 13:04:57.650166789 +0000 UTC m=+1484.541686395" Sep 29 13:05:10 crc kubenswrapper[4611]: I0929 13:05:10.751431 4611 generic.go:334] "Generic (PLEG): container finished" podID="642768bf-2945-467e-bed5-c02808905701" containerID="06bbd0ca22fdc0ad77a701cec25bd03d03a790704df70493507d39f8b64ddfc4" exitCode=0 Sep 29 13:05:10 crc kubenswrapper[4611]: I0929 13:05:10.751538 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" event={"ID":"642768bf-2945-467e-bed5-c02808905701","Type":"ContainerDied","Data":"06bbd0ca22fdc0ad77a701cec25bd03d03a790704df70493507d39f8b64ddfc4"} Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.470550 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.569527 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-ssh-key\") pod \"642768bf-2945-467e-bed5-c02808905701\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.569607 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-repo-setup-combined-ca-bundle\") pod \"642768bf-2945-467e-bed5-c02808905701\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.569745 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-inventory\") pod \"642768bf-2945-467e-bed5-c02808905701\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.569853 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd7sk\" (UniqueName: \"kubernetes.io/projected/642768bf-2945-467e-bed5-c02808905701-kube-api-access-rd7sk\") pod \"642768bf-2945-467e-bed5-c02808905701\" (UID: \"642768bf-2945-467e-bed5-c02808905701\") " Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.583727 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "642768bf-2945-467e-bed5-c02808905701" (UID: "642768bf-2945-467e-bed5-c02808905701"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.594989 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/642768bf-2945-467e-bed5-c02808905701-kube-api-access-rd7sk" (OuterVolumeSpecName: "kube-api-access-rd7sk") pod "642768bf-2945-467e-bed5-c02808905701" (UID: "642768bf-2945-467e-bed5-c02808905701"). InnerVolumeSpecName "kube-api-access-rd7sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.644786 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-inventory" (OuterVolumeSpecName: "inventory") pod "642768bf-2945-467e-bed5-c02808905701" (UID: "642768bf-2945-467e-bed5-c02808905701"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.673208 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd7sk\" (UniqueName: \"kubernetes.io/projected/642768bf-2945-467e-bed5-c02808905701-kube-api-access-rd7sk\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.673246 4611 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.673260 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.675799 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "642768bf-2945-467e-bed5-c02808905701" (UID: "642768bf-2945-467e-bed5-c02808905701"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.772800 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" event={"ID":"642768bf-2945-467e-bed5-c02808905701","Type":"ContainerDied","Data":"2700c26671cb17b79bfdef9d0ad85e8f47fcef3e581e5135f1dd9f25122e9c28"} Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.772847 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2700c26671cb17b79bfdef9d0ad85e8f47fcef3e581e5135f1dd9f25122e9c28" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.772881 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-2878w" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.773943 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/642768bf-2945-467e-bed5-c02808905701-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.901855 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4"] Sep 29 13:05:12 crc kubenswrapper[4611]: E0929 13:05:12.902348 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="642768bf-2945-467e-bed5-c02808905701" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.902374 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="642768bf-2945-467e-bed5-c02808905701" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.902649 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="642768bf-2945-467e-bed5-c02808905701" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.903348 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.906655 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.906849 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.906947 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.920271 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4"] Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.920655 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.977404 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8h5s\" (UniqueName: \"kubernetes.io/projected/4e6fc46f-36a2-4d36-a82e-877539513437-kube-api-access-k8h5s\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.977530 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:12 crc kubenswrapper[4611]: I0929 13:05:12.977671 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.078869 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.078955 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.079033 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8h5s\" (UniqueName: \"kubernetes.io/projected/4e6fc46f-36a2-4d36-a82e-877539513437-kube-api-access-k8h5s\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.083246 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.083246 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.099400 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8h5s\" (UniqueName: \"kubernetes.io/projected/4e6fc46f-36a2-4d36-a82e-877539513437-kube-api-access-k8h5s\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dvcs4\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.229966 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:13 crc kubenswrapper[4611]: I0929 13:05:13.810338 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4"] Sep 29 13:05:14 crc kubenswrapper[4611]: I0929 13:05:14.335706 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:05:14 crc kubenswrapper[4611]: I0929 13:05:14.794153 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" event={"ID":"4e6fc46f-36a2-4d36-a82e-877539513437","Type":"ContainerStarted","Data":"74ba0b4607d01bcae8c82227b9f94499ada9e75eea6e30044f9685b208cd85ee"} Sep 29 13:05:14 crc kubenswrapper[4611]: I0929 13:05:14.794530 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" event={"ID":"4e6fc46f-36a2-4d36-a82e-877539513437","Type":"ContainerStarted","Data":"30622852f2d503c1871b746bf243ec71789ee43ac95acfd7f78f82fc207e80a6"} Sep 29 13:05:14 crc kubenswrapper[4611]: I0929 13:05:14.815353 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" podStartSLOduration=2.294249115 podStartE2EDuration="2.815330499s" podCreationTimestamp="2025-09-29 13:05:12 +0000 UTC" firstStartedPulling="2025-09-29 13:05:13.811365576 +0000 UTC m=+1500.702885182" lastFinishedPulling="2025-09-29 13:05:14.33244696 +0000 UTC m=+1501.223966566" observedRunningTime="2025-09-29 13:05:14.810843709 +0000 UTC m=+1501.702363335" watchObservedRunningTime="2025-09-29 13:05:14.815330499 +0000 UTC m=+1501.706850115" Sep 29 13:05:17 crc kubenswrapper[4611]: I0929 13:05:17.826313 4611 generic.go:334] "Generic (PLEG): container finished" podID="4e6fc46f-36a2-4d36-a82e-877539513437" containerID="74ba0b4607d01bcae8c82227b9f94499ada9e75eea6e30044f9685b208cd85ee" exitCode=0 Sep 29 13:05:17 crc kubenswrapper[4611]: I0929 13:05:17.826398 4611 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" event={"ID":"4e6fc46f-36a2-4d36-a82e-877539513437","Type":"ContainerDied","Data":"74ba0b4607d01bcae8c82227b9f94499ada9e75eea6e30044f9685b208cd85ee"} Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.326825 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.331733 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-ssh-key\") pod \"4e6fc46f-36a2-4d36-a82e-877539513437\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.331793 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8h5s\" (UniqueName: \"kubernetes.io/projected/4e6fc46f-36a2-4d36-a82e-877539513437-kube-api-access-k8h5s\") pod \"4e6fc46f-36a2-4d36-a82e-877539513437\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.331914 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-inventory\") pod \"4e6fc46f-36a2-4d36-a82e-877539513437\" (UID: \"4e6fc46f-36a2-4d36-a82e-877539513437\") " Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.341443 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e6fc46f-36a2-4d36-a82e-877539513437-kube-api-access-k8h5s" (OuterVolumeSpecName: "kube-api-access-k8h5s") pod "4e6fc46f-36a2-4d36-a82e-877539513437" (UID: "4e6fc46f-36a2-4d36-a82e-877539513437"). InnerVolumeSpecName "kube-api-access-k8h5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.380881 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4e6fc46f-36a2-4d36-a82e-877539513437" (UID: "4e6fc46f-36a2-4d36-a82e-877539513437"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.386698 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-inventory" (OuterVolumeSpecName: "inventory") pod "4e6fc46f-36a2-4d36-a82e-877539513437" (UID: "4e6fc46f-36a2-4d36-a82e-877539513437"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.442265 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.442315 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8h5s\" (UniqueName: \"kubernetes.io/projected/4e6fc46f-36a2-4d36-a82e-877539513437-kube-api-access-k8h5s\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.442333 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6fc46f-36a2-4d36-a82e-877539513437-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.846233 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" event={"ID":"4e6fc46f-36a2-4d36-a82e-877539513437","Type":"ContainerDied","Data":"30622852f2d503c1871b746bf243ec71789ee43ac95acfd7f78f82fc207e80a6"} Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.846852 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30622852f2d503c1871b746bf243ec71789ee43ac95acfd7f78f82fc207e80a6" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.846438 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dvcs4" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.944008 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6"] Sep 29 13:05:19 crc kubenswrapper[4611]: E0929 13:05:19.944646 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6fc46f-36a2-4d36-a82e-877539513437" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.944669 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6fc46f-36a2-4d36-a82e-877539513437" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.944883 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6fc46f-36a2-4d36-a82e-877539513437" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.945673 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.951357 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.953685 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.954054 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.954881 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:05:19 crc kubenswrapper[4611]: I0929 13:05:19.960656 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6"] Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.053129 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.053200 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.053334 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x75fx\" (UniqueName: \"kubernetes.io/projected/482ab873-2d1f-421c-b3b7-ec74175ad046-kube-api-access-x75fx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.053367 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.155446 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x75fx\" (UniqueName: \"kubernetes.io/projected/482ab873-2d1f-421c-b3b7-ec74175ad046-kube-api-access-x75fx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.155728 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.155828 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.155994 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.171084 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.185574 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.186823 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.194098 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x75fx\" (UniqueName: \"kubernetes.io/projected/482ab873-2d1f-421c-b3b7-ec74175ad046-kube-api-access-x75fx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.280390 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.840297 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6"] Sep 29 13:05:20 crc kubenswrapper[4611]: I0929 13:05:20.858225 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" event={"ID":"482ab873-2d1f-421c-b3b7-ec74175ad046","Type":"ContainerStarted","Data":"5b96556a3a4176a35e0f6ea5aca746008a91c738d27bfb17683d48178367302b"} Sep 29 13:05:22 crc kubenswrapper[4611]: I0929 13:05:22.878774 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" event={"ID":"482ab873-2d1f-421c-b3b7-ec74175ad046","Type":"ContainerStarted","Data":"0ecd1db708e45012703db07cdd4e7096a37acefe4581ff125d137bbfaa26121a"} Sep 29 13:05:22 crc kubenswrapper[4611]: I0929 13:05:22.893935 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" podStartSLOduration=3.4069827999999998 podStartE2EDuration="3.893915126s" podCreationTimestamp="2025-09-29 13:05:19 +0000 UTC" firstStartedPulling="2025-09-29 13:05:20.84796859 +0000 UTC m=+1507.739488196" lastFinishedPulling="2025-09-29 13:05:21.334900916 +0000 UTC m=+1508.226420522" observedRunningTime="2025-09-29 13:05:22.891250259 +0000 UTC m=+1509.782769875" watchObservedRunningTime="2025-09-29 13:05:22.893915126 +0000 UTC m=+1509.785434742" Sep 29 13:05:33 crc kubenswrapper[4611]: I0929 13:05:33.761375 4611 scope.go:117] "RemoveContainer" containerID="e03bc1fc0c6ce4dc97eceec069147b1d9f30b474c0d9907a0f9fcb3103b8d65d" Sep 29 13:05:33 crc kubenswrapper[4611]: I0929 13:05:33.785600 4611 scope.go:117] "RemoveContainer" containerID="cdd5f8b20832ce876cb2582356fe5b5b757d58bd4fb563981d1946788e6c232f" Sep 29 13:05:33 crc kubenswrapper[4611]: I0929 13:05:33.809952 4611 scope.go:117] "RemoveContainer" containerID="4ea898188c86a90de47be684c4db4ec7d64bc1a93786ea06b1e3a4993de7e37a" Sep 29 13:05:33 crc kubenswrapper[4611]: I0929 13:05:33.830592 4611 scope.go:117] "RemoveContainer" containerID="bbb1bce54b28fd3c8f245e2258c71bad04553da19d048c56c6c79fec534d1e13" Sep 29 13:05:33 crc kubenswrapper[4611]: I0929 13:05:33.850068 4611 scope.go:117] "RemoveContainer" containerID="21e5532b5f6c50aea86fdda44c584dc060e36c5f9eb62b1d3a38fe7ad7d97f03" Sep 29 13:05:33 crc kubenswrapper[4611]: I0929 13:05:33.887613 4611 scope.go:117] "RemoveContainer" containerID="4d950c12a63512a84c79c8f95ead50918914bddf69b70a98a68baab58793f419" Sep 29 13:06:04 crc kubenswrapper[4611]: I0929 13:06:04.629053 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:06:04 crc kubenswrapper[4611]: I0929 13:06:04.629436 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:06:34 crc kubenswrapper[4611]: I0929 13:06:34.007373 4611 scope.go:117] "RemoveContainer" 
containerID="5d71ea815ea44b121494615ef8733654a74798a7d69d693916dd0367cc2776c1" Sep 29 13:06:34 crc kubenswrapper[4611]: I0929 13:06:34.040972 4611 scope.go:117] "RemoveContainer" containerID="1e7c474a3b8a6754c41a0e4dab3c9c847c65125b8cb0a875e9c712b8d679227a" Sep 29 13:06:34 crc kubenswrapper[4611]: I0929 13:06:34.063244 4611 scope.go:117] "RemoveContainer" containerID="6e001040663132fa6d98e1547c84d37b6fca049464c8bab2177e1d60f78b2508" Sep 29 13:06:34 crc kubenswrapper[4611]: I0929 13:06:34.629115 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:06:34 crc kubenswrapper[4611]: I0929 13:06:34.629461 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.628883 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.629489 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.629546 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.630365 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.630421 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" gracePeriod=600 Sep 29 13:07:04 crc kubenswrapper[4611]: E0929 13:07:04.773971 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.889883 4611 generic.go:334] "Generic 
(PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" exitCode=0 Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.890063 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"} Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.890269 4611 scope.go:117] "RemoveContainer" containerID="7942f9fe9e1c29e9bc5facddd920c692e815332925345c3fdce8ee2caca74f90" Sep 29 13:07:04 crc kubenswrapper[4611]: I0929 13:07:04.891082 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:07:04 crc kubenswrapper[4611]: E0929 13:07:04.891430 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:07:16 crc kubenswrapper[4611]: I0929 13:07:16.736984 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:07:16 crc kubenswrapper[4611]: E0929 13:07:16.738439 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:07:29 crc kubenswrapper[4611]: I0929 13:07:29.736551 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:07:29 crc kubenswrapper[4611]: E0929 13:07:29.737310 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:07:44 crc kubenswrapper[4611]: I0929 13:07:44.737426 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:07:44 crc kubenswrapper[4611]: E0929 13:07:44.739792 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.448953 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b2v92"] Sep 29 13:07:55 crc 
kubenswrapper[4611]: I0929 13:07:55.453123 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.463698 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2v92"] Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.542646 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmg7l\" (UniqueName: \"kubernetes.io/projected/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-kube-api-access-hmg7l\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.542765 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-utilities\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.542851 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-catalog-content\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.644405 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-utilities\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.644489 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-catalog-content\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.644590 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmg7l\" (UniqueName: \"kubernetes.io/projected/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-kube-api-access-hmg7l\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.644928 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-utilities\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.645149 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-catalog-content\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc 
kubenswrapper[4611]: I0929 13:07:55.674451 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmg7l\" (UniqueName: \"kubernetes.io/projected/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-kube-api-access-hmg7l\") pod \"redhat-marketplace-b2v92\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:55 crc kubenswrapper[4611]: I0929 13:07:55.775767 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:07:56 crc kubenswrapper[4611]: I0929 13:07:56.387880 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2v92"] Sep 29 13:07:56 crc kubenswrapper[4611]: I0929 13:07:56.444826 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2v92" event={"ID":"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0","Type":"ContainerStarted","Data":"431186005ce7ef20910daa98abad501bf490a63802422f8b2fed2c9a3b8549e7"} Sep 29 13:07:57 crc kubenswrapper[4611]: I0929 13:07:57.458260 4611 generic.go:334] "Generic (PLEG): container finished" podID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerID="1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00" exitCode=0 Sep 29 13:07:57 crc kubenswrapper[4611]: I0929 13:07:57.458561 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2v92" event={"ID":"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0","Type":"ContainerDied","Data":"1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00"} Sep 29 13:07:57 crc kubenswrapper[4611]: I0929 13:07:57.462065 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:07:58 crc kubenswrapper[4611]: I0929 13:07:58.739262 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:07:58 crc kubenswrapper[4611]: E0929 13:07:58.739912 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.036545 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rlvtq"] Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.038602 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.082580 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rlvtq"] Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.220563 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-catalog-content\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.220649 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh79g\" (UniqueName: \"kubernetes.io/projected/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-kube-api-access-gh79g\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.220671 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-utilities\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.322161 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-catalog-content\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.322251 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh79g\" (UniqueName: \"kubernetes.io/projected/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-kube-api-access-gh79g\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.322277 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-utilities\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.322858 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-catalog-content\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.322928 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-utilities\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.350164 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gh79g\" (UniqueName: \"kubernetes.io/projected/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-kube-api-access-gh79g\") pod \"redhat-operators-rlvtq\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.365541 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.490103 4611 generic.go:334] "Generic (PLEG): container finished" podID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerID="1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789" exitCode=0 Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.490161 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2v92" event={"ID":"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0","Type":"ContainerDied","Data":"1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789"} Sep 29 13:07:59 crc kubenswrapper[4611]: I0929 13:07:59.970939 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rlvtq"] Sep 29 13:07:59 crc kubenswrapper[4611]: W0929 13:07:59.978040 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cb25c47_aeb6_43fa_b3ab_79dc689013e0.slice/crio-dd00c2a6c46a08e3b49eaa69978a6a209c512a3a4618eb25179b371f3dbaa175 WatchSource:0}: Error finding container dd00c2a6c46a08e3b49eaa69978a6a209c512a3a4618eb25179b371f3dbaa175: Status 404 returned error can't find the container with id dd00c2a6c46a08e3b49eaa69978a6a209c512a3a4618eb25179b371f3dbaa175 Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.037120 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ltql4"] Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.039443 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.064154 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ltql4"] Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.098344 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsl56\" (UniqueName: \"kubernetes.io/projected/a4d1060e-4c25-4189-8717-660f2afa8d88-kube-api-access-hsl56\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.098422 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-catalog-content\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.098636 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-utilities\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.202228 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsl56\" (UniqueName: \"kubernetes.io/projected/a4d1060e-4c25-4189-8717-660f2afa8d88-kube-api-access-hsl56\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.202309 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-catalog-content\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.202351 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-utilities\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.203113 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-utilities\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.203232 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-catalog-content\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.234959 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hsl56\" (UniqueName: \"kubernetes.io/projected/a4d1060e-4c25-4189-8717-660f2afa8d88-kube-api-access-hsl56\") pod \"certified-operators-ltql4\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.426849 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.520204 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2v92" event={"ID":"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0","Type":"ContainerStarted","Data":"58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835"} Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.535310 4611 generic.go:334] "Generic (PLEG): container finished" podID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerID="05aa87a1dfb76a03cfa0f5ff2d5f96c824ca7158bb1514fffaf5b3c967410b49" exitCode=0 Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.535356 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerDied","Data":"05aa87a1dfb76a03cfa0f5ff2d5f96c824ca7158bb1514fffaf5b3c967410b49"} Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.535384 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerStarted","Data":"dd00c2a6c46a08e3b49eaa69978a6a209c512a3a4618eb25179b371f3dbaa175"} Sep 29 13:08:00 crc kubenswrapper[4611]: I0929 13:08:00.563028 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b2v92" podStartSLOduration=2.8907322840000003 podStartE2EDuration="5.563009667s" podCreationTimestamp="2025-09-29 13:07:55 +0000 UTC" firstStartedPulling="2025-09-29 13:07:57.461806088 +0000 UTC m=+1664.353325694" lastFinishedPulling="2025-09-29 13:08:00.134083471 +0000 UTC m=+1667.025603077" observedRunningTime="2025-09-29 13:08:00.548536858 +0000 UTC m=+1667.440056464" watchObservedRunningTime="2025-09-29 13:08:00.563009667 +0000 UTC m=+1667.454529273" Sep 29 13:08:01 crc kubenswrapper[4611]: I0929 13:08:01.117069 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ltql4"] Sep 29 13:08:01 crc kubenswrapper[4611]: I0929 13:08:01.546636 4611 generic.go:334] "Generic (PLEG): container finished" podID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerID="04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132" exitCode=0 Sep 29 13:08:01 crc kubenswrapper[4611]: I0929 13:08:01.548066 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerDied","Data":"04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132"} Sep 29 13:08:01 crc kubenswrapper[4611]: I0929 13:08:01.548096 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerStarted","Data":"9ab5bc03be95b2fa8703e5e2b345d3f34550ec3742596ec843946e90648a7c49"} Sep 29 13:08:02 crc kubenswrapper[4611]: I0929 13:08:02.557959 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerStarted","Data":"3a9344a9985a0d982dea79c6a3bb80fcf6d8d0a92d135cbd1041d7ac94e3a139"} Sep 29 13:08:03 crc kubenswrapper[4611]: I0929 13:08:03.573710 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerStarted","Data":"7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee"} Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.095450 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-q64bh"] Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.115577 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-q64bh"] Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.614309 4611 generic.go:334] "Generic (PLEG): container finished" podID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerID="7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee" exitCode=0 Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.614385 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerDied","Data":"7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee"} Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.751427 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e5e64a3-589f-4f55-a0fa-491c0ac42120" path="/var/lib/kubelet/pods/9e5e64a3-589f-4f55-a0fa-491c0ac42120/volumes" Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.776844 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.778188 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:08:05 crc kubenswrapper[4611]: I0929 13:08:05.869198 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.048314 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-s2w2b"] Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.060344 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-s2w2b"] Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.627478 4611 generic.go:334] "Generic (PLEG): container finished" podID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerID="3a9344a9985a0d982dea79c6a3bb80fcf6d8d0a92d135cbd1041d7ac94e3a139" exitCode=0 Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.627547 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerDied","Data":"3a9344a9985a0d982dea79c6a3bb80fcf6d8d0a92d135cbd1041d7ac94e3a139"} Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.635481 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerStarted","Data":"7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183"} Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.681846 4611 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ltql4" podStartSLOduration=1.919332461 podStartE2EDuration="6.681827867s" podCreationTimestamp="2025-09-29 13:08:00 +0000 UTC" firstStartedPulling="2025-09-29 13:08:01.555327802 +0000 UTC m=+1668.446847408" lastFinishedPulling="2025-09-29 13:08:06.317823208 +0000 UTC m=+1673.209342814" observedRunningTime="2025-09-29 13:08:06.674576897 +0000 UTC m=+1673.566096523" watchObservedRunningTime="2025-09-29 13:08:06.681827867 +0000 UTC m=+1673.573347473" Sep 29 13:08:06 crc kubenswrapper[4611]: I0929 13:08:06.712241 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:08:07 crc kubenswrapper[4611]: I0929 13:08:07.649428 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerStarted","Data":"1fea6c909c78c51a5c2de2b0e88f2bc5c0440e978e1ff6255bc68bad4c62fe8e"} Sep 29 13:08:07 crc kubenswrapper[4611]: I0929 13:08:07.685759 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rlvtq" podStartSLOduration=2.200196521 podStartE2EDuration="8.685737438s" podCreationTimestamp="2025-09-29 13:07:59 +0000 UTC" firstStartedPulling="2025-09-29 13:08:00.539682633 +0000 UTC m=+1667.431202239" lastFinishedPulling="2025-09-29 13:08:07.02522356 +0000 UTC m=+1673.916743156" observedRunningTime="2025-09-29 13:08:07.673592817 +0000 UTC m=+1674.565112443" watchObservedRunningTime="2025-09-29 13:08:07.685737438 +0000 UTC m=+1674.577257044" Sep 29 13:08:07 crc kubenswrapper[4611]: I0929 13:08:07.750174 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11bc76a-123b-4356-9976-52ff27cea6f2" path="/var/lib/kubelet/pods/b11bc76a-123b-4356-9976-52ff27cea6f2/volumes" Sep 29 13:08:08 crc kubenswrapper[4611]: I0929 13:08:08.227219 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2v92"] Sep 29 13:08:09 crc kubenswrapper[4611]: I0929 13:08:09.366310 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:08:09 crc kubenswrapper[4611]: I0929 13:08:09.369760 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:08:09 crc kubenswrapper[4611]: I0929 13:08:09.671122 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b2v92" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="registry-server" containerID="cri-o://58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835" gracePeriod=2 Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.184276 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.224897 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-utilities\") pod \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.224954 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmg7l\" (UniqueName: \"kubernetes.io/projected/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-kube-api-access-hmg7l\") pod \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.224980 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-catalog-content\") pod \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\" (UID: \"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0\") " Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.227502 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-utilities" (OuterVolumeSpecName: "utilities") pod "6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" (UID: "6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.234798 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-kube-api-access-hmg7l" (OuterVolumeSpecName: "kube-api-access-hmg7l") pod "6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" (UID: "6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0"). InnerVolumeSpecName "kube-api-access-hmg7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.256156 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" (UID: "6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.327185 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmg7l\" (UniqueName: \"kubernetes.io/projected/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-kube-api-access-hmg7l\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.327227 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.327242 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.415701 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rlvtq" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server" probeResult="failure" output=< Sep 29 13:08:10 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:08:10 crc kubenswrapper[4611]: > Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.427275 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.427329 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.683529 4611 generic.go:334] "Generic (PLEG): container finished" podID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerID="58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835" exitCode=0 Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.683592 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2v92" event={"ID":"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0","Type":"ContainerDied","Data":"58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835"} Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.683652 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2v92" event={"ID":"6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0","Type":"ContainerDied","Data":"431186005ce7ef20910daa98abad501bf490a63802422f8b2fed2c9a3b8549e7"} Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.683676 4611 scope.go:117] "RemoveContainer" containerID="58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.683842 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2v92" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.718585 4611 scope.go:117] "RemoveContainer" containerID="1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.720471 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2v92"] Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.736236 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2v92"] Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.742309 4611 scope.go:117] "RemoveContainer" containerID="1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.786208 4611 scope.go:117] "RemoveContainer" containerID="58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835" Sep 29 13:08:10 crc kubenswrapper[4611]: E0929 13:08:10.786857 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835\": container with ID starting with 58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835 not found: ID does not exist" containerID="58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.786915 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835"} err="failed to get container status \"58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835\": rpc error: code = NotFound desc = could not find container \"58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835\": container with ID starting with 58a178fb9cdaa196156c5a3a250cbef3300de16cefed7649202716051da57835 not found: ID does not exist" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.786947 4611 scope.go:117] "RemoveContainer" containerID="1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789" Sep 29 13:08:10 crc kubenswrapper[4611]: E0929 13:08:10.787254 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789\": container with ID starting with 1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789 not found: ID does not exist" containerID="1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.787280 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789"} err="failed to get container status \"1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789\": rpc error: code = NotFound desc = could not find container \"1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789\": container with ID starting with 1ca4193b7999243faff8d08d55c21338cac702dd55640d8bc85c61870e0bd789 not found: ID does not exist" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.787302 4611 scope.go:117] "RemoveContainer" containerID="1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00" Sep 29 13:08:10 crc kubenswrapper[4611]: E0929 13:08:10.787597 4611 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00\": container with ID starting with 1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00 not found: ID does not exist" containerID="1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00" Sep 29 13:08:10 crc kubenswrapper[4611]: I0929 13:08:10.787718 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00"} err="failed to get container status \"1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00\": rpc error: code = NotFound desc = could not find container \"1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00\": container with ID starting with 1fecf3743827b590d2d0b25d5bfafbf4b1e33b9099c40fcc66c01cda1672ec00 not found: ID does not exist" Sep 29 13:08:11 crc kubenswrapper[4611]: I0929 13:08:11.475537 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-ltql4" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="registry-server" probeResult="failure" output=< Sep 29 13:08:11 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:08:11 crc kubenswrapper[4611]: > Sep 29 13:08:11 crc kubenswrapper[4611]: I0929 13:08:11.750030 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" path="/var/lib/kubelet/pods/6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0/volumes" Sep 29 13:08:12 crc kubenswrapper[4611]: I0929 13:08:12.042527 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mmxgc"] Sep 29 13:08:12 crc kubenswrapper[4611]: I0929 13:08:12.054215 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mmxgc"] Sep 29 13:08:12 crc kubenswrapper[4611]: I0929 13:08:12.736244 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:08:12 crc kubenswrapper[4611]: E0929 13:08:12.736513 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:08:13 crc kubenswrapper[4611]: I0929 13:08:13.748865 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6655f4be-50ff-4ca4-aec0-82d21d2cc552" path="/var/lib/kubelet/pods/6655f4be-50ff-4ca4-aec0-82d21d2cc552/volumes" Sep 29 13:08:14 crc kubenswrapper[4611]: I0929 13:08:14.029999 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-m4vwf"] Sep 29 13:08:14 crc kubenswrapper[4611]: I0929 13:08:14.039715 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-6kfbn"] Sep 29 13:08:14 crc kubenswrapper[4611]: I0929 13:08:14.048601 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-m4vwf"] Sep 29 13:08:14 crc kubenswrapper[4611]: I0929 13:08:14.057675 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-6kfbn"] Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.032067 4611 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b110-account-create-v9jb8"] Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.041933 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-hvpz4"] Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.054480 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b110-account-create-v9jb8"] Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.064550 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-hvpz4"] Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.750018 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce7630f-ba0f-426d-8440-7525c4555235" path="/var/lib/kubelet/pods/1ce7630f-ba0f-426d-8440-7525c4555235/volumes" Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.751179 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c7901dd-cfe7-48a6-91c1-78fb168963cb" path="/var/lib/kubelet/pods/5c7901dd-cfe7-48a6-91c1-78fb168963cb/volumes" Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.751938 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="717832b2-73e3-4b2c-8f82-26603268ee98" path="/var/lib/kubelet/pods/717832b2-73e3-4b2c-8f82-26603268ee98/volumes" Sep 29 13:08:15 crc kubenswrapper[4611]: I0929 13:08:15.752595 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac707613-fa2b-4406-8789-2c0a072d49f5" path="/var/lib/kubelet/pods/ac707613-fa2b-4406-8789-2c0a072d49f5/volumes" Sep 29 13:08:16 crc kubenswrapper[4611]: I0929 13:08:16.039467 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9b28-account-create-nt47n"] Sep 29 13:08:16 crc kubenswrapper[4611]: I0929 13:08:16.053498 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9b28-account-create-nt47n"] Sep 29 13:08:17 crc kubenswrapper[4611]: I0929 13:08:17.748069 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42115bcd-dc69-491c-a920-96969813366c" path="/var/lib/kubelet/pods/42115bcd-dc69-491c-a920-96969813366c/volumes" Sep 29 13:08:20 crc kubenswrapper[4611]: I0929 13:08:20.412487 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rlvtq" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server" probeResult="failure" output=< Sep 29 13:08:20 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:08:20 crc kubenswrapper[4611]: > Sep 29 13:08:20 crc kubenswrapper[4611]: I0929 13:08:20.480881 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:20 crc kubenswrapper[4611]: I0929 13:08:20.553720 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:20 crc kubenswrapper[4611]: I0929 13:08:20.728838 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ltql4"] Sep 29 13:08:21 crc kubenswrapper[4611]: I0929 13:08:21.783234 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ltql4" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="registry-server" containerID="cri-o://7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183" gracePeriod=2 Sep 29 13:08:22 crc 
kubenswrapper[4611]: I0929 13:08:22.029607 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-2a63-account-create-hxwzj"] Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.055005 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-2a63-account-create-hxwzj"] Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.301328 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.472734 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-utilities\") pod \"a4d1060e-4c25-4189-8717-660f2afa8d88\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.473062 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsl56\" (UniqueName: \"kubernetes.io/projected/a4d1060e-4c25-4189-8717-660f2afa8d88-kube-api-access-hsl56\") pod \"a4d1060e-4c25-4189-8717-660f2afa8d88\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.473381 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-catalog-content\") pod \"a4d1060e-4c25-4189-8717-660f2afa8d88\" (UID: \"a4d1060e-4c25-4189-8717-660f2afa8d88\") " Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.474850 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-utilities" (OuterVolumeSpecName: "utilities") pod "a4d1060e-4c25-4189-8717-660f2afa8d88" (UID: "a4d1060e-4c25-4189-8717-660f2afa8d88"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.479967 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4d1060e-4c25-4189-8717-660f2afa8d88-kube-api-access-hsl56" (OuterVolumeSpecName: "kube-api-access-hsl56") pod "a4d1060e-4c25-4189-8717-660f2afa8d88" (UID: "a4d1060e-4c25-4189-8717-660f2afa8d88"). InnerVolumeSpecName "kube-api-access-hsl56". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.515942 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4d1060e-4c25-4189-8717-660f2afa8d88" (UID: "a4d1060e-4c25-4189-8717-660f2afa8d88"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.575995 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.576029 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsl56\" (UniqueName: \"kubernetes.io/projected/a4d1060e-4c25-4189-8717-660f2afa8d88-kube-api-access-hsl56\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.576043 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d1060e-4c25-4189-8717-660f2afa8d88-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.796990 4611 generic.go:334] "Generic (PLEG): container finished" podID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerID="7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183" exitCode=0 Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.797158 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerDied","Data":"7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183"} Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.797793 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltql4" event={"ID":"a4d1060e-4c25-4189-8717-660f2afa8d88","Type":"ContainerDied","Data":"9ab5bc03be95b2fa8703e5e2b345d3f34550ec3742596ec843946e90648a7c49"} Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.797919 4611 scope.go:117] "RemoveContainer" containerID="7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.797301 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ltql4" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.823137 4611 scope.go:117] "RemoveContainer" containerID="7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.843942 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ltql4"] Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.852034 4611 scope.go:117] "RemoveContainer" containerID="04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.855433 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ltql4"] Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.906448 4611 scope.go:117] "RemoveContainer" containerID="7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183" Sep 29 13:08:22 crc kubenswrapper[4611]: E0929 13:08:22.909397 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183\": container with ID starting with 7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183 not found: ID does not exist" containerID="7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.909458 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183"} err="failed to get container status \"7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183\": rpc error: code = NotFound desc = could not find container \"7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183\": container with ID starting with 7177707cadc805ff3335af66499c6bb0d80a180a041e3574f9d5adf7506a5183 not found: ID does not exist" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.909527 4611 scope.go:117] "RemoveContainer" containerID="7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee" Sep 29 13:08:22 crc kubenswrapper[4611]: E0929 13:08:22.910588 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee\": container with ID starting with 7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee not found: ID does not exist" containerID="7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.910804 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee"} err="failed to get container status \"7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee\": rpc error: code = NotFound desc = could not find container \"7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee\": container with ID starting with 7e6efe3596be79cc9a9084d4a09feb560036767f184780850bd44d33cc5c7bee not found: ID does not exist" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.910847 4611 scope.go:117] "RemoveContainer" containerID="04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132" Sep 29 13:08:22 crc kubenswrapper[4611]: E0929 13:08:22.911922 4611 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132\": container with ID starting with 04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132 not found: ID does not exist" containerID="04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132" Sep 29 13:08:22 crc kubenswrapper[4611]: I0929 13:08:22.911949 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132"} err="failed to get container status \"04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132\": rpc error: code = NotFound desc = could not find container \"04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132\": container with ID starting with 04df872927a0423c945592067064025a6c5f314adeffa1264126467b864e0132 not found: ID does not exist" Sep 29 13:08:23 crc kubenswrapper[4611]: I0929 13:08:23.752242 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0991826-0ea3-46e1-91d7-995b1eeb3772" path="/var/lib/kubelet/pods/a0991826-0ea3-46e1-91d7-995b1eeb3772/volumes" Sep 29 13:08:23 crc kubenswrapper[4611]: I0929 13:08:23.754042 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" path="/var/lib/kubelet/pods/a4d1060e-4c25-4189-8717-660f2afa8d88/volumes" Sep 29 13:08:24 crc kubenswrapper[4611]: I0929 13:08:24.736731 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:08:24 crc kubenswrapper[4611]: E0929 13:08:24.738426 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:08:25 crc kubenswrapper[4611]: I0929 13:08:25.033247 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-337e-account-create-vwdf8"] Sep 29 13:08:25 crc kubenswrapper[4611]: I0929 13:08:25.042297 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-337e-account-create-vwdf8"] Sep 29 13:08:25 crc kubenswrapper[4611]: I0929 13:08:25.750064 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53965ddb-78b2-40ba-aa0b-808caee352d3" path="/var/lib/kubelet/pods/53965ddb-78b2-40ba-aa0b-808caee352d3/volumes" Sep 29 13:08:30 crc kubenswrapper[4611]: I0929 13:08:30.410119 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rlvtq" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server" probeResult="failure" output=< Sep 29 13:08:30 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:08:30 crc kubenswrapper[4611]: > Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.195408 4611 scope.go:117] "RemoveContainer" containerID="f8b003c2b5a1f94fb0322f5508a8a342c8472ce5e071b045dc8d796c7bc33a1f" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.227905 4611 scope.go:117] "RemoveContainer" containerID="5a348c3cb9723c5ff8a65384e712b3d2021f9dbebec4032ca1b82b7c354f7b23" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.289804 4611 
scope.go:117] "RemoveContainer" containerID="1498868025b262a8ce3c2df4700757af26e941c0c5f135461e5552464eef0eb7" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.373795 4611 scope.go:117] "RemoveContainer" containerID="80260896e036eda75e10f57be967de18b9407d131cbf352e19d2e5f822f335d3" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.411270 4611 scope.go:117] "RemoveContainer" containerID="87d09ebea5c24fe79c80f71eadd4d3db8897feb83ae141ebb36d8bdf7b466229" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.466587 4611 scope.go:117] "RemoveContainer" containerID="aacc762242ca7e1489e8960dd7b52c81c9882cbb597357c8ea66ac9bd4201bd7" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.529842 4611 scope.go:117] "RemoveContainer" containerID="7fb28917d3e5a3c4947dd9bf8a1956062e0e3417fe08106da6f9fed02de19f52" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.555664 4611 scope.go:117] "RemoveContainer" containerID="c2893b0b9f01955e7eb07304ac957b68829a00734e2f99d2e7e478f0716bab4d" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.580146 4611 scope.go:117] "RemoveContainer" containerID="0a2190557c2a56b76c791c9a3333ce65ea6a1042b9be2639f835d99552ebbb01" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.601211 4611 scope.go:117] "RemoveContainer" containerID="f8e8875a9aaf4d32509b9630ed153f8bd32c254b7de03e3485b742b1d6f30469" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.621655 4611 scope.go:117] "RemoveContainer" containerID="09beadb43ee2e338d4bbe181cd0327b44d454676f7c8d8d9f5622c6bc4320d3b" Sep 29 13:08:34 crc kubenswrapper[4611]: I0929 13:08:34.644560 4611 scope.go:117] "RemoveContainer" containerID="c4edfee9a5209657b91e78dee3c1cbf984b8e04a582b834b6ea216abbc6c1fa7" Sep 29 13:08:35 crc kubenswrapper[4611]: I0929 13:08:35.736582 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:08:35 crc kubenswrapper[4611]: E0929 13:08:35.736872 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:08:39 crc kubenswrapper[4611]: I0929 13:08:39.036389 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0fb9-account-create-fqsph"] Sep 29 13:08:39 crc kubenswrapper[4611]: I0929 13:08:39.047038 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0fb9-account-create-fqsph"] Sep 29 13:08:39 crc kubenswrapper[4611]: I0929 13:08:39.413958 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:08:39 crc kubenswrapper[4611]: I0929 13:08:39.473361 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:08:39 crc kubenswrapper[4611]: I0929 13:08:39.649763 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rlvtq"] Sep 29 13:08:39 crc kubenswrapper[4611]: I0929 13:08:39.748442 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fd4b56b-fd87-409a-a2b8-475d86973e7e" path="/var/lib/kubelet/pods/0fd4b56b-fd87-409a-a2b8-475d86973e7e/volumes" Sep 29 13:08:40 crc 
kubenswrapper[4611]: I0929 13:08:40.970010 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rlvtq" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server" containerID="cri-o://1fea6c909c78c51a5c2de2b0e88f2bc5c0440e978e1ff6255bc68bad4c62fe8e" gracePeriod=2 Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.032812 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-08d9-account-create-4cgzx"] Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.043997 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-08d9-account-create-4cgzx"] Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.756043 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25628b46-3189-4901-8ce7-c17a4330f8b8" path="/var/lib/kubelet/pods/25628b46-3189-4901-8ce7-c17a4330f8b8/volumes" Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.983759 4611 generic.go:334] "Generic (PLEG): container finished" podID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerID="1fea6c909c78c51a5c2de2b0e88f2bc5c0440e978e1ff6255bc68bad4c62fe8e" exitCode=0 Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.983840 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerDied","Data":"1fea6c909c78c51a5c2de2b0e88f2bc5c0440e978e1ff6255bc68bad4c62fe8e"} Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.984017 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlvtq" event={"ID":"0cb25c47-aeb6-43fa-b3ab-79dc689013e0","Type":"ContainerDied","Data":"dd00c2a6c46a08e3b49eaa69978a6a209c512a3a4618eb25179b371f3dbaa175"} Sep 29 13:08:41 crc kubenswrapper[4611]: I0929 13:08:41.984030 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd00c2a6c46a08e3b49eaa69978a6a209c512a3a4618eb25179b371f3dbaa175" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.008686 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.162820 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh79g\" (UniqueName: \"kubernetes.io/projected/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-kube-api-access-gh79g\") pod \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.162963 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-catalog-content\") pod \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.163013 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-utilities\") pod \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\" (UID: \"0cb25c47-aeb6-43fa-b3ab-79dc689013e0\") " Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.164069 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-utilities" (OuterVolumeSpecName: "utilities") pod "0cb25c47-aeb6-43fa-b3ab-79dc689013e0" (UID: "0cb25c47-aeb6-43fa-b3ab-79dc689013e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.168136 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-kube-api-access-gh79g" (OuterVolumeSpecName: "kube-api-access-gh79g") pod "0cb25c47-aeb6-43fa-b3ab-79dc689013e0" (UID: "0cb25c47-aeb6-43fa-b3ab-79dc689013e0"). InnerVolumeSpecName "kube-api-access-gh79g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.248323 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cb25c47-aeb6-43fa-b3ab-79dc689013e0" (UID: "0cb25c47-aeb6-43fa-b3ab-79dc689013e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.265841 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh79g\" (UniqueName: \"kubernetes.io/projected/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-kube-api-access-gh79g\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.265888 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.265899 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cb25c47-aeb6-43fa-b3ab-79dc689013e0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:42 crc kubenswrapper[4611]: I0929 13:08:42.992798 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlvtq" Sep 29 13:08:43 crc kubenswrapper[4611]: I0929 13:08:43.028229 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rlvtq"] Sep 29 13:08:43 crc kubenswrapper[4611]: I0929 13:08:43.036322 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rlvtq"] Sep 29 13:08:43 crc kubenswrapper[4611]: I0929 13:08:43.748833 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" path="/var/lib/kubelet/pods/0cb25c47-aeb6-43fa-b3ab-79dc689013e0/volumes" Sep 29 13:08:44 crc kubenswrapper[4611]: I0929 13:08:44.003108 4611 generic.go:334] "Generic (PLEG): container finished" podID="482ab873-2d1f-421c-b3b7-ec74175ad046" containerID="0ecd1db708e45012703db07cdd4e7096a37acefe4581ff125d137bbfaa26121a" exitCode=0 Sep 29 13:08:44 crc kubenswrapper[4611]: I0929 13:08:44.003153 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" event={"ID":"482ab873-2d1f-421c-b3b7-ec74175ad046","Type":"ContainerDied","Data":"0ecd1db708e45012703db07cdd4e7096a37acefe4581ff125d137bbfaa26121a"} Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.463360 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.527183 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x75fx\" (UniqueName: \"kubernetes.io/projected/482ab873-2d1f-421c-b3b7-ec74175ad046-kube-api-access-x75fx\") pod \"482ab873-2d1f-421c-b3b7-ec74175ad046\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.527614 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-bootstrap-combined-ca-bundle\") pod \"482ab873-2d1f-421c-b3b7-ec74175ad046\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.527834 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-ssh-key\") pod \"482ab873-2d1f-421c-b3b7-ec74175ad046\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.527994 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-inventory\") pod \"482ab873-2d1f-421c-b3b7-ec74175ad046\" (UID: \"482ab873-2d1f-421c-b3b7-ec74175ad046\") " Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.533522 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "482ab873-2d1f-421c-b3b7-ec74175ad046" (UID: "482ab873-2d1f-421c-b3b7-ec74175ad046"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.534049 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/482ab873-2d1f-421c-b3b7-ec74175ad046-kube-api-access-x75fx" (OuterVolumeSpecName: "kube-api-access-x75fx") pod "482ab873-2d1f-421c-b3b7-ec74175ad046" (UID: "482ab873-2d1f-421c-b3b7-ec74175ad046"). InnerVolumeSpecName "kube-api-access-x75fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.575331 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-inventory" (OuterVolumeSpecName: "inventory") pod "482ab873-2d1f-421c-b3b7-ec74175ad046" (UID: "482ab873-2d1f-421c-b3b7-ec74175ad046"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.580843 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "482ab873-2d1f-421c-b3b7-ec74175ad046" (UID: "482ab873-2d1f-421c-b3b7-ec74175ad046"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.630484 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.630516 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.630527 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x75fx\" (UniqueName: \"kubernetes.io/projected/482ab873-2d1f-421c-b3b7-ec74175ad046-kube-api-access-x75fx\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:45 crc kubenswrapper[4611]: I0929 13:08:45.630539 4611 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ab873-2d1f-421c-b3b7-ec74175ad046-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.022662 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6" event={"ID":"482ab873-2d1f-421c-b3b7-ec74175ad046","Type":"ContainerDied","Data":"5b96556a3a4176a35e0f6ea5aca746008a91c738d27bfb17683d48178367302b"} Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.022707 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b96556a3a4176a35e0f6ea5aca746008a91c738d27bfb17683d48178367302b" Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.022709 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.107463 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"]
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.107929 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.107955 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.107977 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="extract-content"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.107987 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="extract-content"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108010 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="extract-content"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108018 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="extract-content"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108034 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="extract-utilities"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108041 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="extract-utilities"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108061 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="extract-utilities"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108070 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="extract-utilities"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108087 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108095 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108106 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="extract-content"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108114 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="extract-content"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108129 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108138 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108159 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482ab873-2d1f-421c-b3b7-ec74175ad046" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108169 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="482ab873-2d1f-421c-b3b7-ec74175ad046" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:08:46 crc kubenswrapper[4611]: E0929 13:08:46.108189 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="extract-utilities"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108197 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="extract-utilities"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108429 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cb25c47-aeb6-43fa-b3ab-79dc689013e0" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108450 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="482ab873-2d1f-421c-b3b7-ec74175ad046" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108463 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dda749a-a8cb-4d5b-bbb0-1086eaeb79a0" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.108482 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4d1060e-4c25-4189-8717-660f2afa8d88" containerName="registry-server"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.109336 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.112185 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.112569 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.112760 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.114761 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.165000 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"]
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.240079 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj69j\" (UniqueName: \"kubernetes.io/projected/9ed7d11c-f153-4632-bef8-b39a6bed2966-kube-api-access-nj69j\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.240394 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.240652 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.342653 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.342736 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.342794 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj69j\" (UniqueName: \"kubernetes.io/projected/9ed7d11c-f153-4632-bef8-b39a6bed2966-kube-api-access-nj69j\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.350284 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.350298 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.360243 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj69j\" (UniqueName: \"kubernetes.io/projected/9ed7d11c-f153-4632-bef8-b39a6bed2966-kube-api-access-nj69j\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:46 crc kubenswrapper[4611]: I0929 13:08:46.428545 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:08:47 crc kubenswrapper[4611]: I0929 13:08:47.086044 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"]
Sep 29 13:08:48 crc kubenswrapper[4611]: I0929 13:08:48.042447 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f" event={"ID":"9ed7d11c-f153-4632-bef8-b39a6bed2966","Type":"ContainerStarted","Data":"ceea31a0af4374dba082b60329c9ce76f9a61d9252d1c1ef5bbff3c495418605"}
Sep 29 13:08:48 crc kubenswrapper[4611]: I0929 13:08:48.042738 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f" event={"ID":"9ed7d11c-f153-4632-bef8-b39a6bed2966","Type":"ContainerStarted","Data":"def594305e2b6054c367ccfa21830c98b2677a9b3c79818bb55de1511fcbe582"}
Sep 29 13:08:48 crc kubenswrapper[4611]: I0929 13:08:48.066195 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f" podStartSLOduration=1.6084903160000001 podStartE2EDuration="2.066177562s" podCreationTimestamp="2025-09-29 13:08:46 +0000 UTC" firstStartedPulling="2025-09-29 13:08:47.101848856 +0000 UTC m=+1713.993368462" lastFinishedPulling="2025-09-29 13:08:47.559536102 +0000 UTC m=+1714.451055708" observedRunningTime="2025-09-29 13:08:48.056902104 +0000 UTC m=+1714.948421710" watchObservedRunningTime="2025-09-29 13:08:48.066177562 +0000 UTC m=+1714.957697168"
Sep 29 13:08:49 crc kubenswrapper[4611]: I0929 13:08:49.737074 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:08:49 crc kubenswrapper[4611]: E0929 13:08:49.737821 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:08:53 crc kubenswrapper[4611]: I0929 13:08:53.034358 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-x6848"]
Sep 29 13:08:53 crc kubenswrapper[4611]: I0929 13:08:53.044749 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-x6848"]
Sep 29 13:08:53 crc kubenswrapper[4611]: I0929 13:08:53.751228 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fc77f95-5def-4756-80e5-d2b044505f85" path="/var/lib/kubelet/pods/4fc77f95-5def-4756-80e5-d2b044505f85/volumes"
Sep 29 13:08:55 crc kubenswrapper[4611]: I0929 13:08:55.029086 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-ljr99"]
Sep 29 13:08:55 crc kubenswrapper[4611]: I0929 13:08:55.042243 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-ljr99"]
Sep 29 13:08:55 crc kubenswrapper[4611]: I0929 13:08:55.747760 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88e8c098-763d-4a3b-b5e1-3f29d2b37845" path="/var/lib/kubelet/pods/88e8c098-763d-4a3b-b5e1-3f29d2b37845/volumes"
Sep 29 13:09:01 crc kubenswrapper[4611]: I0929 13:09:01.736057 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:09:01 crc kubenswrapper[4611]: E0929 13:09:01.736632 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:09:15 crc kubenswrapper[4611]: I0929 13:09:15.736975 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:09:15 crc kubenswrapper[4611]: E0929 13:09:15.737934 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:09:29 crc kubenswrapper[4611]: I0929 13:09:29.736203 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:09:29 crc kubenswrapper[4611]: E0929 13:09:29.737118 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:09:32 crc kubenswrapper[4611]: I0929 13:09:32.049310 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-flx2p"]
Sep 29 13:09:32 crc kubenswrapper[4611]: I0929 13:09:32.057879 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-flx2p"]
Sep 29 13:09:33 crc kubenswrapper[4611]: I0929 13:09:33.747677 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81475e0c-543c-43f2-8a53-d9a1e7adcbad" path="/var/lib/kubelet/pods/81475e0c-543c-43f2-8a53-d9a1e7adcbad/volumes"
Sep 29 13:09:34 crc kubenswrapper[4611]: I0929 13:09:34.905174 4611 scope.go:117] "RemoveContainer" containerID="e016544a6090ff4b99bc136bbd8f17ca099a4f5e5695fc1589b094597277cad7"
Sep 29 13:09:34 crc kubenswrapper[4611]: I0929 13:09:34.929576 4611 scope.go:117] "RemoveContainer" containerID="53bcd7e7891373f9b84f84c1d1b7dd05e2ad1547224831e889dfdf07fd8f12a5"
Sep 29 13:09:34 crc kubenswrapper[4611]: I0929 13:09:34.978772 4611 scope.go:117] "RemoveContainer" containerID="78c435430c5473dcf241010ab50ff5ffd14ae148fdf05d8b95a69614b7c6520d"
Sep 29 13:09:35 crc kubenswrapper[4611]: I0929 13:09:35.029641 4611 scope.go:117] "RemoveContainer" containerID="ff65b0f113cc0e8eaeba161d4cc84072e42d536252574e19c149c22039fc02b9"
Sep 29 13:09:35 crc kubenswrapper[4611]: I0929 13:09:35.075450 4611 scope.go:117] "RemoveContainer" containerID="490d8ae710cb50b606a9a1a23abf966ccd707159c7bc1cd7fbfdbade4c6fa3f5"
Sep 29 13:09:43 crc kubenswrapper[4611]: I0929 13:09:43.744140 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:09:43 crc kubenswrapper[4611]: E0929 13:09:43.745098 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:09:49 crc kubenswrapper[4611]: I0929 13:09:49.053764 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-8jjnd"]
Sep 29 13:09:49 crc kubenswrapper[4611]: I0929 13:09:49.064302 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-8jjnd"]
Sep 29 13:09:49 crc kubenswrapper[4611]: I0929 13:09:49.077603 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-w8cp9"]
Sep 29 13:09:49 crc kubenswrapper[4611]: I0929 13:09:49.086310 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-w8cp9"]
Sep 29 13:09:49 crc kubenswrapper[4611]: I0929 13:09:49.747544 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1ff376d-1d04-41b1-a417-6de011ef3054" path="/var/lib/kubelet/pods/d1ff376d-1d04-41b1-a417-6de011ef3054/volumes"
Sep 29 13:09:49 crc kubenswrapper[4611]: I0929 13:09:49.748216 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5e45783-38de-4e39-9ad8-3da9ec111aa9" path="/var/lib/kubelet/pods/f5e45783-38de-4e39-9ad8-3da9ec111aa9/volumes"
Sep 29 13:09:52 crc kubenswrapper[4611]: I0929 13:09:52.028822 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-9sjsz"]
Sep 29 13:09:52 crc kubenswrapper[4611]: I0929 13:09:52.037395 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-9sjsz"]
Sep 29 13:09:53 crc kubenswrapper[4611]: I0929 13:09:53.747018 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc1b4a7-82e6-42fa-9117-96db1ddcda7c" path="/var/lib/kubelet/pods/adc1b4a7-82e6-42fa-9117-96db1ddcda7c/volumes"
Sep 29 13:09:55 crc kubenswrapper[4611]: I0929 13:09:55.736700 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:09:55 crc kubenswrapper[4611]: E0929 13:09:55.737001 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:10:08 crc kubenswrapper[4611]: I0929 13:10:08.045227 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-nmkf9"]
Sep 29 13:10:08 crc kubenswrapper[4611]: I0929 13:10:08.054184 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-nmkf9"]
Sep 29 13:10:09 crc kubenswrapper[4611]: I0929 13:10:09.751735 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="578c0d20-ef6e-43d0-8290-1ec794d9a0ee" path="/var/lib/kubelet/pods/578c0d20-ef6e-43d0-8290-1ec794d9a0ee/volumes"
Sep 29 13:10:10 crc kubenswrapper[4611]: I0929 13:10:10.737667 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:10:10 crc kubenswrapper[4611]: E0929 13:10:10.738376 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:10:23 crc kubenswrapper[4611]: I0929 13:10:23.746461 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:10:23 crc kubenswrapper[4611]: E0929 13:10:23.747304 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:10:35 crc kubenswrapper[4611]: I0929 13:10:35.267210 4611 scope.go:117] "RemoveContainer" containerID="de5ed9e3669155a1741ee806c28b219f73255f7bca033bc129a01ef986b8f519"
Sep 29 13:10:35 crc kubenswrapper[4611]: I0929 13:10:35.325232 4611 scope.go:117] "RemoveContainer" containerID="4fed542c9764facf98667597b5c294f1861530d13cc12d72c853a618e3fecd0c"
Sep 29 13:10:35 crc kubenswrapper[4611]: I0929 13:10:35.352907 4611 scope.go:117] "RemoveContainer" containerID="ca0943188ff5b89702704b6a1ca55b982bd8a550ac85bb3071accd3b8539007e"
Sep 29 13:10:35 crc kubenswrapper[4611]: I0929 13:10:35.405270 4611 scope.go:117] "RemoveContainer" containerID="81ce465819ffef39bf0463e75ccb04256e692fd254c40f8f1acefc1308420fff"
Sep 29 13:10:35 crc kubenswrapper[4611]: I0929 13:10:35.463016 4611 scope.go:117] "RemoveContainer" containerID="1ecd772c05171aacec2579f4c394b0e436668d00cb2aa1d83275ecf962ce9d11"
Sep 29 13:10:35 crc kubenswrapper[4611]: I0929 13:10:35.515358 4611 scope.go:117] "RemoveContainer" containerID="995a252886b9af34197d378a57203af56537bbb7c62567c8b57155cb153eaa65"
Sep 29 13:10:37 crc kubenswrapper[4611]: I0929 13:10:37.737498 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:10:37 crc kubenswrapper[4611]: E0929 13:10:37.738416 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:10:48 crc kubenswrapper[4611]: I0929 13:10:48.736762 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:10:48 crc kubenswrapper[4611]: E0929 13:10:48.737661 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:10:55 crc kubenswrapper[4611]: I0929 13:10:55.051643 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-fvkzh"]
Sep 29 13:10:55 crc kubenswrapper[4611]: I0929 13:10:55.061378 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-655h7"]
Sep 29 13:10:55 crc kubenswrapper[4611]: I0929 13:10:55.071269 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-fvkzh"]
Sep 29 13:10:55 crc kubenswrapper[4611]: I0929 13:10:55.080153 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-655h7"]
Sep 29 13:10:55 crc kubenswrapper[4611]: I0929 13:10:55.749920 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ede43e6-1802-4389-8a47-ae78b16d3144" path="/var/lib/kubelet/pods/9ede43e6-1802-4389-8a47-ae78b16d3144/volumes"
Sep 29 13:10:55 crc kubenswrapper[4611]: I0929 13:10:55.751210 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abe4e13c-dc14-4649-896d-23ad11daedd5" path="/var/lib/kubelet/pods/abe4e13c-dc14-4649-896d-23ad11daedd5/volumes"
Sep 29 13:10:57 crc kubenswrapper[4611]: I0929 13:10:57.042523 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-5mdd9"]
Sep 29 13:10:57 crc kubenswrapper[4611]: I0929 13:10:57.054067 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-5mdd9"]
Sep 29 13:10:57 crc kubenswrapper[4611]: I0929 13:10:57.746996 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0597e5b-7c6b-465d-b298-0f72aa28e514" path="/var/lib/kubelet/pods/d0597e5b-7c6b-465d-b298-0f72aa28e514/volumes"
Sep 29 13:11:03 crc kubenswrapper[4611]: I0929 13:11:03.744837 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:11:03 crc kubenswrapper[4611]: E0929 13:11:03.745709 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:11:05 crc kubenswrapper[4611]: I0929 13:11:05.357703 4611 generic.go:334] "Generic (PLEG): container finished" podID="9ed7d11c-f153-4632-bef8-b39a6bed2966" containerID="ceea31a0af4374dba082b60329c9ce76f9a61d9252d1c1ef5bbff3c495418605" exitCode=0
Sep 29 13:11:05 crc kubenswrapper[4611]: I0929 13:11:05.357770 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f" event={"ID":"9ed7d11c-f153-4632-bef8-b39a6bed2966","Type":"ContainerDied","Data":"ceea31a0af4374dba082b60329c9ce76f9a61d9252d1c1ef5bbff3c495418605"}
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.027939 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-cd48-account-create-rspn7"]
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.035381 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-cd48-account-create-rspn7"]
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.793603 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.927143 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-ssh-key\") pod \"9ed7d11c-f153-4632-bef8-b39a6bed2966\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") "
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.927291 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj69j\" (UniqueName: \"kubernetes.io/projected/9ed7d11c-f153-4632-bef8-b39a6bed2966-kube-api-access-nj69j\") pod \"9ed7d11c-f153-4632-bef8-b39a6bed2966\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") "
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.927430 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-inventory\") pod \"9ed7d11c-f153-4632-bef8-b39a6bed2966\" (UID: \"9ed7d11c-f153-4632-bef8-b39a6bed2966\") "
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.935044 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ed7d11c-f153-4632-bef8-b39a6bed2966-kube-api-access-nj69j" (OuterVolumeSpecName: "kube-api-access-nj69j") pod "9ed7d11c-f153-4632-bef8-b39a6bed2966" (UID: "9ed7d11c-f153-4632-bef8-b39a6bed2966"). InnerVolumeSpecName "kube-api-access-nj69j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.958942 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9ed7d11c-f153-4632-bef8-b39a6bed2966" (UID: "9ed7d11c-f153-4632-bef8-b39a6bed2966"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:11:06 crc kubenswrapper[4611]: I0929 13:11:06.968482 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-inventory" (OuterVolumeSpecName: "inventory") pod "9ed7d11c-f153-4632-bef8-b39a6bed2966" (UID: "9ed7d11c-f153-4632-bef8-b39a6bed2966"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.030105 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.030741 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed7d11c-f153-4632-bef8-b39a6bed2966-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.030758 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj69j\" (UniqueName: \"kubernetes.io/projected/9ed7d11c-f153-4632-bef8-b39a6bed2966-kube-api-access-nj69j\") on node \"crc\" DevicePath \"\""
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.045550 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cddd-account-create-g6wtp"]
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.054048 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4946-account-create-r9w2d"]
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.064740 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cddd-account-create-g6wtp"]
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.075661 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4946-account-create-r9w2d"]
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.379322 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f" event={"ID":"9ed7d11c-f153-4632-bef8-b39a6bed2966","Type":"ContainerDied","Data":"def594305e2b6054c367ccfa21830c98b2677a9b3c79818bb55de1511fcbe582"}
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.379813 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="def594305e2b6054c367ccfa21830c98b2677a9b3c79818bb55de1511fcbe582"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.379415 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.475278 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"]
Sep 29 13:11:07 crc kubenswrapper[4611]: E0929 13:11:07.475807 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ed7d11c-f153-4632-bef8-b39a6bed2966" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.475830 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ed7d11c-f153-4632-bef8-b39a6bed2966" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.476036 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ed7d11c-f153-4632-bef8-b39a6bed2966" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.476861 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.480782 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.480938 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.482058 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.483274 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.500736 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"]
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.645055 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.645729 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.645864 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gq7r\" (UniqueName: \"kubernetes.io/projected/6bc80e04-ce3c-485c-b9a0-138366726186-kube-api-access-4gq7r\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.748088 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gq7r\" (UniqueName: \"kubernetes.io/projected/6bc80e04-ce3c-485c-b9a0-138366726186-kube-api-access-4gq7r\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.748186 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.748284 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.752056 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47543a39-6232-4860-a9d9-20654c865434" path="/var/lib/kubelet/pods/47543a39-6232-4860-a9d9-20654c865434/volumes"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.753076 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f77cbf5-b67a-4cd2-9ccf-7c29988e541e" path="/var/lib/kubelet/pods/8f77cbf5-b67a-4cd2-9ccf-7c29988e541e/volumes"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.754283 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.754692 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.764790 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9319ecc4-d8b0-495c-8335-c483325d02a3" path="/var/lib/kubelet/pods/9319ecc4-d8b0-495c-8335-c483325d02a3/volumes"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.768006 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gq7r\" (UniqueName: \"kubernetes.io/projected/6bc80e04-ce3c-485c-b9a0-138366726186-kube-api-access-4gq7r\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:07 crc kubenswrapper[4611]: I0929 13:11:07.793552 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:11:08 crc kubenswrapper[4611]: I0929 13:11:08.387197 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"]
Sep 29 13:11:08 crc kubenswrapper[4611]: I0929 13:11:08.396969 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj" event={"ID":"6bc80e04-ce3c-485c-b9a0-138366726186","Type":"ContainerStarted","Data":"66fd975106aac03c7d47ff52ba7b643dbbb338997837d94a58d59b9adcfdf9ec"}
Sep 29 13:11:09 crc kubenswrapper[4611]: I0929 13:11:09.408531 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj" event={"ID":"6bc80e04-ce3c-485c-b9a0-138366726186","Type":"ContainerStarted","Data":"d28011a5ca903d0d9c0134c8cf18326804ef921fc4a29e7c0b37e46137058ae8"}
Sep 29 13:11:09 crc kubenswrapper[4611]: I0929 13:11:09.442607 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj" podStartSLOduration=1.95033201 podStartE2EDuration="2.442564944s" podCreationTimestamp="2025-09-29 13:11:07 +0000 UTC" firstStartedPulling="2025-09-29 13:11:08.38801453 +0000 UTC m=+1855.279534136" lastFinishedPulling="2025-09-29 13:11:08.880247444 +0000 UTC m=+1855.771767070" observedRunningTime="2025-09-29 13:11:09.426997204 +0000 UTC m=+1856.318516810" watchObservedRunningTime="2025-09-29 13:11:09.442564944 +0000 UTC m=+1856.334084550"
Sep 29 13:11:17 crc kubenswrapper[4611]: I0929 13:11:17.737757 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:11:17 crc kubenswrapper[4611]: E0929 13:11:17.739486 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:11:28 crc kubenswrapper[4611]: I0929 13:11:28.736976 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:11:28 crc kubenswrapper[4611]: E0929 13:11:28.737729 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:11:35 crc kubenswrapper[4611]: I0929 13:11:35.655413 4611 scope.go:117] "RemoveContainer" containerID="0194ea8eacd6e3968d8010c9ecd3ecd3282d128c3576e8b3345cf6486f58d25d"
Sep 29 13:11:35 crc kubenswrapper[4611]: I0929 13:11:35.705638 4611 scope.go:117] "RemoveContainer" containerID="9221d4e9bd21fd8048c1ebbf45a6298602eb0a39036e3ab2634cbc1125e44971"
Sep 29 13:11:35 crc kubenswrapper[4611]: I0929 13:11:35.751763 4611 scope.go:117] "RemoveContainer" containerID="290a48b9235c62eb9ce12006d08061fbf254a3a7fa9c3886aa765f4485da33ca"
Sep 29 13:11:35 crc kubenswrapper[4611]: I0929 13:11:35.785534 4611 scope.go:117] "RemoveContainer" containerID="71980fcb542e9851741602fcd25f0720ba0f1e8ca272a650ae941043542cb0f6"
Sep 29 13:11:35 crc kubenswrapper[4611]: I0929 13:11:35.820780 4611 scope.go:117] "RemoveContainer" containerID="3cc87c07cffb219c38e6beb91488255ac87f5a6458252dda7871530c5d1e0992"
Sep 29 13:11:35 crc kubenswrapper[4611]: I0929 13:11:35.872875 4611 scope.go:117] "RemoveContainer" containerID="c388c83e45b33aee3b2838adb7d33d73d56bf833acff7dabbadf8a1e06805fcd"
Sep 29 13:11:41 crc kubenswrapper[4611]: I0929 13:11:41.737301 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:11:41 crc kubenswrapper[4611]: E0929 13:11:41.738135 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:11:47 crc kubenswrapper[4611]: I0929 13:11:47.054545 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-njndr"]
Sep 29 13:11:47 crc kubenswrapper[4611]: I0929 13:11:47.064635 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-njndr"]
Sep 29 13:11:47 crc kubenswrapper[4611]: I0929 13:11:47.752267 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15aa0847-f436-4999-9622-92c588953523" path="/var/lib/kubelet/pods/15aa0847-f436-4999-9622-92c588953523/volumes"
Sep 29 13:11:52 crc kubenswrapper[4611]: I0929 13:11:52.737650 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:11:52 crc kubenswrapper[4611]: E0929 13:11:52.738746 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:12:03 crc kubenswrapper[4611]: I0929 13:12:03.743735 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:12:03 crc kubenswrapper[4611]: E0929 13:12:03.745062 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:12:15 crc kubenswrapper[4611]: I0929 13:12:15.737083 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc"
Sep 29 13:12:16 crc kubenswrapper[4611]: I0929 13:12:16.069172 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"07a8b1ce6659f69a7f8378fefa7226292ee4c44508f137640082c9911fcec9a1"}
Sep 29 13:12:24 crc kubenswrapper[4611]: I0929 13:12:24.044899 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-nlhnf"]
Sep 29 13:12:24 crc kubenswrapper[4611]: I0929 13:12:24.061176 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-nlhnf"]
Sep 29 13:12:25 crc kubenswrapper[4611]: I0929 13:12:25.750703 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7ea831d-4481-47e7-a96b-f3cee6f1d00c" path="/var/lib/kubelet/pods/a7ea831d-4481-47e7-a96b-f3cee6f1d00c/volumes"
Sep 29 13:12:30 crc kubenswrapper[4611]: I0929 13:12:30.218026 4611 generic.go:334] "Generic (PLEG): container finished" podID="6bc80e04-ce3c-485c-b9a0-138366726186" containerID="d28011a5ca903d0d9c0134c8cf18326804ef921fc4a29e7c0b37e46137058ae8" exitCode=0
Sep 29 13:12:30 crc kubenswrapper[4611]: I0929 13:12:30.218166 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj" event={"ID":"6bc80e04-ce3c-485c-b9a0-138366726186","Type":"ContainerDied","Data":"d28011a5ca903d0d9c0134c8cf18326804ef921fc4a29e7c0b37e46137058ae8"}
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.037055 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fdss8"]
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.048563 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fdss8"]
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.752106 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30289ed8-4668-459a-9e89-698bea27c2f0" path="/var/lib/kubelet/pods/30289ed8-4668-459a-9e89-698bea27c2f0/volumes"
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.801804 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.917188 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-inventory\") pod \"6bc80e04-ce3c-485c-b9a0-138366726186\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") "
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.917347 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-ssh-key\") pod \"6bc80e04-ce3c-485c-b9a0-138366726186\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") "
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.917404 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gq7r\" (UniqueName: \"kubernetes.io/projected/6bc80e04-ce3c-485c-b9a0-138366726186-kube-api-access-4gq7r\") pod \"6bc80e04-ce3c-485c-b9a0-138366726186\" (UID: \"6bc80e04-ce3c-485c-b9a0-138366726186\") "
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.950578 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bc80e04-ce3c-485c-b9a0-138366726186-kube-api-access-4gq7r" (OuterVolumeSpecName: "kube-api-access-4gq7r") pod "6bc80e04-ce3c-485c-b9a0-138366726186" (UID: "6bc80e04-ce3c-485c-b9a0-138366726186"). InnerVolumeSpecName "kube-api-access-4gq7r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.967753 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-inventory" (OuterVolumeSpecName: "inventory") pod "6bc80e04-ce3c-485c-b9a0-138366726186" (UID: "6bc80e04-ce3c-485c-b9a0-138366726186"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:12:31 crc kubenswrapper[4611]: I0929 13:12:31.968815 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6bc80e04-ce3c-485c-b9a0-138366726186" (UID: "6bc80e04-ce3c-485c-b9a0-138366726186"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.021458 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.021511 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bc80e04-ce3c-485c-b9a0-138366726186-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.021524 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gq7r\" (UniqueName: \"kubernetes.io/projected/6bc80e04-ce3c-485c-b9a0-138366726186-kube-api-access-4gq7r\") on node \"crc\" DevicePath \"\""
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.247744 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj" event={"ID":"6bc80e04-ce3c-485c-b9a0-138366726186","Type":"ContainerDied","Data":"66fd975106aac03c7d47ff52ba7b643dbbb338997837d94a58d59b9adcfdf9ec"}
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.247802 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66fd975106aac03c7d47ff52ba7b643dbbb338997837d94a58d59b9adcfdf9ec"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.247879 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.363966 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"]
Sep 29 13:12:32 crc kubenswrapper[4611]: E0929 13:12:32.364909 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc80e04-ce3c-485c-b9a0-138366726186" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.364935 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc80e04-ce3c-485c-b9a0-138366726186" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.365150 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bc80e04-ce3c-485c-b9a0-138366726186" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.366050 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.375777 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"]
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.376385 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.376573 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.377658 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.381114 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.538846 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.539088 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2xmz\" (UniqueName: \"kubernetes.io/projected/51d1eee6-844a-4026-a4c8-8bc1ec752b77-kube-api-access-l2xmz\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.539185 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.640670 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2xmz\" (UniqueName: \"kubernetes.io/projected/51d1eee6-844a-4026-a4c8-8bc1ec752b77-kube-api-access-l2xmz\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.640751 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.640804 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.648924 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.649481 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.667725 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2xmz\" (UniqueName: \"kubernetes.io/projected/51d1eee6-844a-4026-a4c8-8bc1ec752b77-kube-api-access-l2xmz\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gtjln\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:32 crc kubenswrapper[4611]: I0929 13:12:32.694371 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:33 crc kubenswrapper[4611]: I0929 13:12:33.321950 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"]
Sep 29 13:12:34 crc kubenswrapper[4611]: I0929 13:12:34.270365 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln" event={"ID":"51d1eee6-844a-4026-a4c8-8bc1ec752b77","Type":"ContainerStarted","Data":"9b6f0d15e474a489658661240c3d8945ba6fdc032124b1005661111d44273dc1"}
Sep 29 13:12:35 crc kubenswrapper[4611]: I0929 13:12:35.279908 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln" event={"ID":"51d1eee6-844a-4026-a4c8-8bc1ec752b77","Type":"ContainerStarted","Data":"06578c3171c3dcf29dc9b4ee3e748b9af92fea579f6161d0586689875f7d0997"}
Sep 29 13:12:35 crc kubenswrapper[4611]: I0929 13:12:35.301702 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln" podStartSLOduration=2.62078787 podStartE2EDuration="3.301681596s" podCreationTimestamp="2025-09-29 13:12:32 +0000 UTC" firstStartedPulling="2025-09-29 13:12:33.333322844 +0000 UTC m=+1940.224842450" lastFinishedPulling="2025-09-29 13:12:34.01421657 +0000 UTC m=+1940.905736176" observedRunningTime="2025-09-29 13:12:35.299110521 +0000 UTC m=+1942.190630137" watchObservedRunningTime="2025-09-29 13:12:35.301681596 +0000 UTC m=+1942.193201202"
Sep 29 13:12:36 crc kubenswrapper[4611]: I0929 13:12:36.071345 4611 scope.go:117] "RemoveContainer" containerID="50120a6644c4bbfbbd2b699e457f564c98197c6cfa7ffb3e367585d239e3f6e9"
Sep 29 13:12:36 crc kubenswrapper[4611]: I0929 13:12:36.127703 4611 scope.go:117] "RemoveContainer" containerID="3b50f3e4a3e20ff1a194c72479ba28e67f37ed5ae96e7f2a964ba9e8ef5e2643"
Sep 29 13:12:36 crc kubenswrapper[4611]: I0929 13:12:36.216852 4611 scope.go:117] "RemoveContainer" containerID="028d5c67557afc2c896e97de914d5a106bec8c6d0751b728381a5a09c751105e"
Sep 29 13:12:40 crc kubenswrapper[4611]: I0929 13:12:40.331189 4611 generic.go:334] "Generic (PLEG): container finished" podID="51d1eee6-844a-4026-a4c8-8bc1ec752b77" containerID="06578c3171c3dcf29dc9b4ee3e748b9af92fea579f6161d0586689875f7d0997" exitCode=0
Sep 29 13:12:40 crc kubenswrapper[4611]: I0929 13:12:40.331265 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln" event={"ID":"51d1eee6-844a-4026-a4c8-8bc1ec752b77","Type":"ContainerDied","Data":"06578c3171c3dcf29dc9b4ee3e748b9af92fea579f6161d0586689875f7d0997"}
Sep 29 13:12:41 crc kubenswrapper[4611]: I0929 13:12:41.983315 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.072054 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-inventory\") pod \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") "
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.072508 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2xmz\" (UniqueName: \"kubernetes.io/projected/51d1eee6-844a-4026-a4c8-8bc1ec752b77-kube-api-access-l2xmz\") pod \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") "
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.072537 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-ssh-key\") pod \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\" (UID: \"51d1eee6-844a-4026-a4c8-8bc1ec752b77\") "
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.081038 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51d1eee6-844a-4026-a4c8-8bc1ec752b77-kube-api-access-l2xmz" (OuterVolumeSpecName: "kube-api-access-l2xmz") pod "51d1eee6-844a-4026-a4c8-8bc1ec752b77" (UID: "51d1eee6-844a-4026-a4c8-8bc1ec752b77"). InnerVolumeSpecName "kube-api-access-l2xmz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.133695 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-inventory" (OuterVolumeSpecName: "inventory") pod "51d1eee6-844a-4026-a4c8-8bc1ec752b77" (UID: "51d1eee6-844a-4026-a4c8-8bc1ec752b77"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.160490 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "51d1eee6-844a-4026-a4c8-8bc1ec752b77" (UID: "51d1eee6-844a-4026-a4c8-8bc1ec752b77"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.176531 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2xmz\" (UniqueName: \"kubernetes.io/projected/51d1eee6-844a-4026-a4c8-8bc1ec752b77-kube-api-access-l2xmz\") on node \"crc\" DevicePath \"\""
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.176585 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.176601 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51d1eee6-844a-4026-a4c8-8bc1ec752b77-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.353275 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln" event={"ID":"51d1eee6-844a-4026-a4c8-8bc1ec752b77","Type":"ContainerDied","Data":"9b6f0d15e474a489658661240c3d8945ba6fdc032124b1005661111d44273dc1"}
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.353322 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b6f0d15e474a489658661240c3d8945ba6fdc032124b1005661111d44273dc1"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.353392 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gtjln"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.459503 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"]
Sep 29 13:12:42 crc kubenswrapper[4611]: E0929 13:12:42.468192 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d1eee6-844a-4026-a4c8-8bc1ec752b77" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.468231 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d1eee6-844a-4026-a4c8-8bc1ec752b77" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.468471 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d1eee6-844a-4026-a4c8-8bc1ec752b77" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.469247 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.475270 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.477371 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"]
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.477556 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.477960 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.510297 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.585523 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.587255 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br8f2\" (UniqueName: \"kubernetes.io/projected/11cbf559-05e0-4671-b794-f2325cb752a9-kube-api-access-br8f2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.587411 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.690012 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br8f2\" (UniqueName: \"kubernetes.io/projected/11cbf559-05e0-4671-b794-f2325cb752a9-kube-api-access-br8f2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.690116 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"
Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.690216 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID:
\"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.695079 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.695095 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.710429 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br8f2\" (UniqueName: \"kubernetes.io/projected/11cbf559-05e0-4671-b794-f2325cb752a9-kube-api-access-br8f2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gmx5f\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:12:42 crc kubenswrapper[4611]: I0929 13:12:42.799713 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:12:43 crc kubenswrapper[4611]: I0929 13:12:43.492461 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f"] Sep 29 13:12:44 crc kubenswrapper[4611]: I0929 13:12:44.381871 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" event={"ID":"11cbf559-05e0-4671-b794-f2325cb752a9","Type":"ContainerStarted","Data":"6d8a60948a59e08401f1ce557497ea54c7688bc486a844fd2b1ec6c94cc83ef6"} Sep 29 13:12:44 crc kubenswrapper[4611]: I0929 13:12:44.382402 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" event={"ID":"11cbf559-05e0-4671-b794-f2325cb752a9","Type":"ContainerStarted","Data":"45b74c960e8e91293a1a812dd67db6edf8830868313c5da618cb2b5511618bfa"} Sep 29 13:12:44 crc kubenswrapper[4611]: I0929 13:12:44.428390 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" podStartSLOduration=2.019033017 podStartE2EDuration="2.428370576s" podCreationTimestamp="2025-09-29 13:12:42 +0000 UTC" firstStartedPulling="2025-09-29 13:12:43.500108472 +0000 UTC m=+1950.391628078" lastFinishedPulling="2025-09-29 13:12:43.909446011 +0000 UTC m=+1950.800965637" observedRunningTime="2025-09-29 13:12:44.423128055 +0000 UTC m=+1951.314647661" watchObservedRunningTime="2025-09-29 13:12:44.428370576 +0000 UTC m=+1951.319890182" Sep 29 13:13:07 crc kubenswrapper[4611]: I0929 13:13:07.089338 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rsn2k"] Sep 29 13:13:07 crc kubenswrapper[4611]: I0929 13:13:07.100687 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rsn2k"] Sep 29 13:13:07 crc kubenswrapper[4611]: I0929 13:13:07.795478 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="a2702802-11e4-4903-9943-fb74e6f7b756" path="/var/lib/kubelet/pods/a2702802-11e4-4903-9943-fb74e6f7b756/volumes" Sep 29 13:13:23 crc kubenswrapper[4611]: I0929 13:13:23.746846 4611 generic.go:334] "Generic (PLEG): container finished" podID="11cbf559-05e0-4671-b794-f2325cb752a9" containerID="6d8a60948a59e08401f1ce557497ea54c7688bc486a844fd2b1ec6c94cc83ef6" exitCode=0 Sep 29 13:13:23 crc kubenswrapper[4611]: I0929 13:13:23.749296 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" event={"ID":"11cbf559-05e0-4671-b794-f2325cb752a9","Type":"ContainerDied","Data":"6d8a60948a59e08401f1ce557497ea54c7688bc486a844fd2b1ec6c94cc83ef6"} Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.207582 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.295424 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br8f2\" (UniqueName: \"kubernetes.io/projected/11cbf559-05e0-4671-b794-f2325cb752a9-kube-api-access-br8f2\") pod \"11cbf559-05e0-4671-b794-f2325cb752a9\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.295543 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-ssh-key\") pod \"11cbf559-05e0-4671-b794-f2325cb752a9\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.295608 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-inventory\") pod \"11cbf559-05e0-4671-b794-f2325cb752a9\" (UID: \"11cbf559-05e0-4671-b794-f2325cb752a9\") " Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.302067 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cbf559-05e0-4671-b794-f2325cb752a9-kube-api-access-br8f2" (OuterVolumeSpecName: "kube-api-access-br8f2") pod "11cbf559-05e0-4671-b794-f2325cb752a9" (UID: "11cbf559-05e0-4671-b794-f2325cb752a9"). InnerVolumeSpecName "kube-api-access-br8f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.324943 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-inventory" (OuterVolumeSpecName: "inventory") pod "11cbf559-05e0-4671-b794-f2325cb752a9" (UID: "11cbf559-05e0-4671-b794-f2325cb752a9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.328928 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "11cbf559-05e0-4671-b794-f2325cb752a9" (UID: "11cbf559-05e0-4671-b794-f2325cb752a9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.399456 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.399673 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br8f2\" (UniqueName: \"kubernetes.io/projected/11cbf559-05e0-4671-b794-f2325cb752a9-kube-api-access-br8f2\") on node \"crc\" DevicePath \"\"" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.399695 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11cbf559-05e0-4671-b794-f2325cb752a9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.767951 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" event={"ID":"11cbf559-05e0-4671-b794-f2325cb752a9","Type":"ContainerDied","Data":"45b74c960e8e91293a1a812dd67db6edf8830868313c5da618cb2b5511618bfa"} Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.767995 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45b74c960e8e91293a1a812dd67db6edf8830868313c5da618cb2b5511618bfa" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.768068 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gmx5f" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.869614 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh"] Sep 29 13:13:25 crc kubenswrapper[4611]: E0929 13:13:25.870031 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cbf559-05e0-4671-b794-f2325cb752a9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.870049 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cbf559-05e0-4671-b794-f2325cb752a9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.870251 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cbf559-05e0-4671-b794-f2325cb752a9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.870907 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.874282 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.874450 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.874535 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.874672 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.888928 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh"] Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.924727 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdl7m\" (UniqueName: \"kubernetes.io/projected/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-kube-api-access-cdl7m\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.925117 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:25 crc kubenswrapper[4611]: I0929 13:13:25.927380 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.029776 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.030288 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdl7m\" (UniqueName: \"kubernetes.io/projected/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-kube-api-access-cdl7m\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.030380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" 
(UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.036201 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.038189 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.052968 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdl7m\" (UniqueName: \"kubernetes.io/projected/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-kube-api-access-cdl7m\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.223516 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.762982 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh"] Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.767219 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:13:26 crc kubenswrapper[4611]: I0929 13:13:26.779943 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" event={"ID":"2c0b90a0-54ca-47c2-a45e-6ce50bd04061","Type":"ContainerStarted","Data":"190f2145f80487cd45fa89efaa90d769855f6fcaf6b88a555fba8a0eb94e3940"} Sep 29 13:13:27 crc kubenswrapper[4611]: I0929 13:13:27.794940 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" event={"ID":"2c0b90a0-54ca-47c2-a45e-6ce50bd04061","Type":"ContainerStarted","Data":"348208eb13e958f0efc4c67370c0d92350d0dcfcf03441210a595ba7b9c0308f"} Sep 29 13:13:27 crc kubenswrapper[4611]: I0929 13:13:27.823825 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" podStartSLOduration=2.365343052 podStartE2EDuration="2.82380335s" podCreationTimestamp="2025-09-29 13:13:25 +0000 UTC" firstStartedPulling="2025-09-29 13:13:26.767004691 +0000 UTC m=+1993.658524297" lastFinishedPulling="2025-09-29 13:13:27.225464989 +0000 UTC m=+1994.116984595" observedRunningTime="2025-09-29 13:13:27.816245191 +0000 UTC m=+1994.707764807" watchObservedRunningTime="2025-09-29 13:13:27.82380335 +0000 UTC m=+1994.715322956" Sep 29 13:13:36 crc kubenswrapper[4611]: I0929 13:13:36.310104 4611 scope.go:117] "RemoveContainer" containerID="fdf66591eb64ed2281de39648f1a088fb86c5ff83666f43b19a5ddd508b4c393" Sep 29 13:14:20 crc kubenswrapper[4611]: I0929 13:14:20.355644 4611 
generic.go:334] "Generic (PLEG): container finished" podID="2c0b90a0-54ca-47c2-a45e-6ce50bd04061" containerID="348208eb13e958f0efc4c67370c0d92350d0dcfcf03441210a595ba7b9c0308f" exitCode=0 Sep 29 13:14:20 crc kubenswrapper[4611]: I0929 13:14:20.355920 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" event={"ID":"2c0b90a0-54ca-47c2-a45e-6ce50bd04061","Type":"ContainerDied","Data":"348208eb13e958f0efc4c67370c0d92350d0dcfcf03441210a595ba7b9c0308f"} Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.807661 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.827455 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-ssh-key\") pod \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.827513 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-inventory\") pod \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.827659 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdl7m\" (UniqueName: \"kubernetes.io/projected/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-kube-api-access-cdl7m\") pod \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\" (UID: \"2c0b90a0-54ca-47c2-a45e-6ce50bd04061\") " Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.845975 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-kube-api-access-cdl7m" (OuterVolumeSpecName: "kube-api-access-cdl7m") pod "2c0b90a0-54ca-47c2-a45e-6ce50bd04061" (UID: "2c0b90a0-54ca-47c2-a45e-6ce50bd04061"). InnerVolumeSpecName "kube-api-access-cdl7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.865329 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-inventory" (OuterVolumeSpecName: "inventory") pod "2c0b90a0-54ca-47c2-a45e-6ce50bd04061" (UID: "2c0b90a0-54ca-47c2-a45e-6ce50bd04061"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.871834 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2c0b90a0-54ca-47c2-a45e-6ce50bd04061" (UID: "2c0b90a0-54ca-47c2-a45e-6ce50bd04061"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.930458 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdl7m\" (UniqueName: \"kubernetes.io/projected/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-kube-api-access-cdl7m\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.930499 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:21 crc kubenswrapper[4611]: I0929 13:14:21.930509 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c0b90a0-54ca-47c2-a45e-6ce50bd04061-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.386190 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" event={"ID":"2c0b90a0-54ca-47c2-a45e-6ce50bd04061","Type":"ContainerDied","Data":"190f2145f80487cd45fa89efaa90d769855f6fcaf6b88a555fba8a0eb94e3940"} Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.386255 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="190f2145f80487cd45fa89efaa90d769855f6fcaf6b88a555fba8a0eb94e3940" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.386592 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.500406 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d57fs"] Sep 29 13:14:22 crc kubenswrapper[4611]: E0929 13:14:22.500982 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c0b90a0-54ca-47c2-a45e-6ce50bd04061" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.501003 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c0b90a0-54ca-47c2-a45e-6ce50bd04061" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.501187 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c0b90a0-54ca-47c2-a45e-6ce50bd04061" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.501955 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.505768 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.505962 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.506209 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.506461 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.514274 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d57fs"] Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.541489 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcvrv\" (UniqueName: \"kubernetes.io/projected/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-kube-api-access-kcvrv\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.541699 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.541783 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.643328 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.643694 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.643872 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcvrv\" (UniqueName: \"kubernetes.io/projected/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-kube-api-access-kcvrv\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc 
kubenswrapper[4611]: I0929 13:14:22.649093 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.652781 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.664012 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcvrv\" (UniqueName: \"kubernetes.io/projected/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-kube-api-access-kcvrv\") pod \"ssh-known-hosts-edpm-deployment-d57fs\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:22 crc kubenswrapper[4611]: I0929 13:14:22.827472 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:23 crc kubenswrapper[4611]: I0929 13:14:23.434587 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d57fs"] Sep 29 13:14:23 crc kubenswrapper[4611]: W0929 13:14:23.452138 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0edd42a5_c566_4c3d_a2d9_6d9568c5396d.slice/crio-cbc13ee6d091d4738b767ffcb35e7dfe0f7f95b773b54e47f548bf7e3dcbfc8e WatchSource:0}: Error finding container cbc13ee6d091d4738b767ffcb35e7dfe0f7f95b773b54e47f548bf7e3dcbfc8e: Status 404 returned error can't find the container with id cbc13ee6d091d4738b767ffcb35e7dfe0f7f95b773b54e47f548bf7e3dcbfc8e Sep 29 13:14:24 crc kubenswrapper[4611]: I0929 13:14:24.410598 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" event={"ID":"0edd42a5-c566-4c3d-a2d9-6d9568c5396d","Type":"ContainerStarted","Data":"457b010b9c728c31db16016d8b0a218b88f2b25afc990d6cff142caeaf014f70"} Sep 29 13:14:24 crc kubenswrapper[4611]: I0929 13:14:24.410951 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" event={"ID":"0edd42a5-c566-4c3d-a2d9-6d9568c5396d","Type":"ContainerStarted","Data":"cbc13ee6d091d4738b767ffcb35e7dfe0f7f95b773b54e47f548bf7e3dcbfc8e"} Sep 29 13:14:24 crc kubenswrapper[4611]: I0929 13:14:24.445999 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" podStartSLOduration=1.9936719269999998 podStartE2EDuration="2.445975508s" podCreationTimestamp="2025-09-29 13:14:22 +0000 UTC" firstStartedPulling="2025-09-29 13:14:23.455335471 +0000 UTC m=+2050.346855077" lastFinishedPulling="2025-09-29 13:14:23.907639052 +0000 UTC m=+2050.799158658" observedRunningTime="2025-09-29 13:14:24.42596583 +0000 UTC m=+2051.317485486" watchObservedRunningTime="2025-09-29 13:14:24.445975508 +0000 UTC m=+2051.337495114" Sep 29 13:14:31 crc kubenswrapper[4611]: I0929 13:14:31.497885 4611 generic.go:334] "Generic (PLEG): container finished" 
podID="0edd42a5-c566-4c3d-a2d9-6d9568c5396d" containerID="457b010b9c728c31db16016d8b0a218b88f2b25afc990d6cff142caeaf014f70" exitCode=0 Sep 29 13:14:31 crc kubenswrapper[4611]: I0929 13:14:31.497959 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" event={"ID":"0edd42a5-c566-4c3d-a2d9-6d9568c5396d","Type":"ContainerDied","Data":"457b010b9c728c31db16016d8b0a218b88f2b25afc990d6cff142caeaf014f70"} Sep 29 13:14:32 crc kubenswrapper[4611]: I0929 13:14:32.963779 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.088397 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-ssh-key-openstack-edpm-ipam\") pod \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.088521 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-inventory-0\") pod \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.088578 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcvrv\" (UniqueName: \"kubernetes.io/projected/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-kube-api-access-kcvrv\") pod \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\" (UID: \"0edd42a5-c566-4c3d-a2d9-6d9568c5396d\") " Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.095244 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-kube-api-access-kcvrv" (OuterVolumeSpecName: "kube-api-access-kcvrv") pod "0edd42a5-c566-4c3d-a2d9-6d9568c5396d" (UID: "0edd42a5-c566-4c3d-a2d9-6d9568c5396d"). InnerVolumeSpecName "kube-api-access-kcvrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.119228 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0edd42a5-c566-4c3d-a2d9-6d9568c5396d" (UID: "0edd42a5-c566-4c3d-a2d9-6d9568c5396d"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.122762 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0edd42a5-c566-4c3d-a2d9-6d9568c5396d" (UID: "0edd42a5-c566-4c3d-a2d9-6d9568c5396d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.191840 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.191881 4611 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.191891 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcvrv\" (UniqueName: \"kubernetes.io/projected/0edd42a5-c566-4c3d-a2d9-6d9568c5396d-kube-api-access-kcvrv\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.536856 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" event={"ID":"0edd42a5-c566-4c3d-a2d9-6d9568c5396d","Type":"ContainerDied","Data":"cbc13ee6d091d4738b767ffcb35e7dfe0f7f95b773b54e47f548bf7e3dcbfc8e"} Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.537362 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbc13ee6d091d4738b767ffcb35e7dfe0f7f95b773b54e47f548bf7e3dcbfc8e" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.537028 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d57fs" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.683583 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd"] Sep 29 13:14:33 crc kubenswrapper[4611]: E0929 13:14:33.684203 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0edd42a5-c566-4c3d-a2d9-6d9568c5396d" containerName="ssh-known-hosts-edpm-deployment" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.684228 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edd42a5-c566-4c3d-a2d9-6d9568c5396d" containerName="ssh-known-hosts-edpm-deployment" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.684415 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="0edd42a5-c566-4c3d-a2d9-6d9568c5396d" containerName="ssh-known-hosts-edpm-deployment" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.685285 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.692648 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.696589 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.697593 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.697875 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.698193 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd"] Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.808386 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb947\" (UniqueName: \"kubernetes.io/projected/fa84a9af-7a10-4a0b-8391-cc5db50e5275-kube-api-access-hb947\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.808565 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.808663 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.912053 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb947\" (UniqueName: \"kubernetes.io/projected/fa84a9af-7a10-4a0b-8391-cc5db50e5275-kube-api-access-hb947\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.912116 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.912175 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.920200 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.922703 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:33 crc kubenswrapper[4611]: I0929 13:14:33.935361 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb947\" (UniqueName: \"kubernetes.io/projected/fa84a9af-7a10-4a0b-8391-cc5db50e5275-kube-api-access-hb947\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2kljd\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.008109 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.320986 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r8gr9"] Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.330146 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.342779 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r8gr9"] Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.453202 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6w7q\" (UniqueName: \"kubernetes.io/projected/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-kube-api-access-l6w7q\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.453255 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-catalog-content\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.453354 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-utilities\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.557317 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-utilities\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.557493 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6w7q\" (UniqueName: \"kubernetes.io/projected/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-kube-api-access-l6w7q\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.557515 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-catalog-content\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.558175 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-utilities\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.559269 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-catalog-content\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.584939 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l6w7q\" (UniqueName: \"kubernetes.io/projected/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-kube-api-access-l6w7q\") pod \"community-operators-r8gr9\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.628902 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.629893 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.648820 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd"] Sep 29 13:14:34 crc kubenswrapper[4611]: I0929 13:14:34.665386 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:35 crc kubenswrapper[4611]: I0929 13:14:35.264119 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r8gr9"] Sep 29 13:14:35 crc kubenswrapper[4611]: I0929 13:14:35.569602 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerStarted","Data":"720cd08926850b39c6f9e7c8be257626cd297122efa19f53500bcd4cdadf21a0"} Sep 29 13:14:35 crc kubenswrapper[4611]: I0929 13:14:35.576410 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" event={"ID":"fa84a9af-7a10-4a0b-8391-cc5db50e5275","Type":"ContainerStarted","Data":"f3fcf76b675d1dabecd10bdc6e097038384f7cd28e3007d62e34aed248ce992b"} Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.416421 4611 scope.go:117] "RemoveContainer" containerID="3a9344a9985a0d982dea79c6a3bb80fcf6d8d0a92d135cbd1041d7ac94e3a139" Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.449834 4611 scope.go:117] "RemoveContainer" containerID="1fea6c909c78c51a5c2de2b0e88f2bc5c0440e978e1ff6255bc68bad4c62fe8e" Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.502332 4611 scope.go:117] "RemoveContainer" containerID="05aa87a1dfb76a03cfa0f5ff2d5f96c824ca7158bb1514fffaf5b3c967410b49" Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.586651 4611 generic.go:334] "Generic (PLEG): container finished" podID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerID="307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d" exitCode=0 Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.586802 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerDied","Data":"307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d"} Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.594137 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" event={"ID":"fa84a9af-7a10-4a0b-8391-cc5db50e5275","Type":"ContainerStarted","Data":"ff4de78801312b860287f97d241d0a67de4c0f6d60225a457a41cf16cbe05d81"} Sep 29 13:14:36 crc kubenswrapper[4611]: I0929 13:14:36.635419 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" podStartSLOduration=3.04200014 podStartE2EDuration="3.635393286s" podCreationTimestamp="2025-09-29 13:14:33 +0000 UTC" firstStartedPulling="2025-09-29 13:14:34.65957363 +0000 UTC m=+2061.551093236" lastFinishedPulling="2025-09-29 13:14:35.252966776 +0000 UTC m=+2062.144486382" observedRunningTime="2025-09-29 13:14:36.630483604 +0000 UTC m=+2063.522003230" watchObservedRunningTime="2025-09-29 13:14:36.635393286 +0000 UTC m=+2063.526912892" Sep 29 13:14:38 crc kubenswrapper[4611]: I0929 13:14:38.615996 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerStarted","Data":"9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f"} Sep 29 13:14:40 crc kubenswrapper[4611]: I0929 13:14:40.640235 4611 generic.go:334] "Generic (PLEG): container finished" podID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerID="9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f" exitCode=0 Sep 29 13:14:40 crc kubenswrapper[4611]: I0929 13:14:40.640328 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerDied","Data":"9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f"} Sep 29 13:14:41 crc kubenswrapper[4611]: I0929 13:14:41.655154 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerStarted","Data":"a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba"} Sep 29 13:14:42 crc kubenswrapper[4611]: I0929 13:14:42.694028 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r8gr9" podStartSLOduration=3.909828464 podStartE2EDuration="8.693995136s" podCreationTimestamp="2025-09-29 13:14:34 +0000 UTC" firstStartedPulling="2025-09-29 13:14:36.588887452 +0000 UTC m=+2063.480407058" lastFinishedPulling="2025-09-29 13:14:41.373054124 +0000 UTC m=+2068.264573730" observedRunningTime="2025-09-29 13:14:42.687331493 +0000 UTC m=+2069.578851099" watchObservedRunningTime="2025-09-29 13:14:42.693995136 +0000 UTC m=+2069.585514742" Sep 29 13:14:44 crc kubenswrapper[4611]: I0929 13:14:44.666278 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:44 crc kubenswrapper[4611]: I0929 13:14:44.666799 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:44 crc kubenswrapper[4611]: I0929 13:14:44.688588 4611 generic.go:334] "Generic (PLEG): container finished" podID="fa84a9af-7a10-4a0b-8391-cc5db50e5275" containerID="ff4de78801312b860287f97d241d0a67de4c0f6d60225a457a41cf16cbe05d81" exitCode=0 Sep 29 13:14:44 crc kubenswrapper[4611]: I0929 13:14:44.688676 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" 
event={"ID":"fa84a9af-7a10-4a0b-8391-cc5db50e5275","Type":"ContainerDied","Data":"ff4de78801312b860287f97d241d0a67de4c0f6d60225a457a41cf16cbe05d81"} Sep 29 13:14:44 crc kubenswrapper[4611]: I0929 13:14:44.730858 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.140527 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.336708 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-inventory\") pod \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.337887 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb947\" (UniqueName: \"kubernetes.io/projected/fa84a9af-7a10-4a0b-8391-cc5db50e5275-kube-api-access-hb947\") pod \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.338764 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-ssh-key\") pod \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\" (UID: \"fa84a9af-7a10-4a0b-8391-cc5db50e5275\") " Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.345201 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa84a9af-7a10-4a0b-8391-cc5db50e5275-kube-api-access-hb947" (OuterVolumeSpecName: "kube-api-access-hb947") pod "fa84a9af-7a10-4a0b-8391-cc5db50e5275" (UID: "fa84a9af-7a10-4a0b-8391-cc5db50e5275"). InnerVolumeSpecName "kube-api-access-hb947". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.370565 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-inventory" (OuterVolumeSpecName: "inventory") pod "fa84a9af-7a10-4a0b-8391-cc5db50e5275" (UID: "fa84a9af-7a10-4a0b-8391-cc5db50e5275"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.375303 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa84a9af-7a10-4a0b-8391-cc5db50e5275" (UID: "fa84a9af-7a10-4a0b-8391-cc5db50e5275"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.442202 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.442739 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb947\" (UniqueName: \"kubernetes.io/projected/fa84a9af-7a10-4a0b-8391-cc5db50e5275-kube-api-access-hb947\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.442753 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa84a9af-7a10-4a0b-8391-cc5db50e5275-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.710945 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" event={"ID":"fa84a9af-7a10-4a0b-8391-cc5db50e5275","Type":"ContainerDied","Data":"f3fcf76b675d1dabecd10bdc6e097038384f7cd28e3007d62e34aed248ce992b"} Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.711001 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3fcf76b675d1dabecd10bdc6e097038384f7cd28e3007d62e34aed248ce992b" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.711301 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2kljd" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.792111 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z"] Sep 29 13:14:46 crc kubenswrapper[4611]: E0929 13:14:46.792614 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa84a9af-7a10-4a0b-8391-cc5db50e5275" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.792647 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa84a9af-7a10-4a0b-8391-cc5db50e5275" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.792829 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa84a9af-7a10-4a0b-8391-cc5db50e5275" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.793639 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.796914 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.797191 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.797232 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.797402 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.816984 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z"] Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.951710 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lgss\" (UniqueName: \"kubernetes.io/projected/366b3f96-580c-4545-b8b5-4e776b70e6c0-kube-api-access-4lgss\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.952020 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:46 crc kubenswrapper[4611]: I0929 13:14:46.952132 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.054097 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.054193 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.054431 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lgss\" (UniqueName: \"kubernetes.io/projected/366b3f96-580c-4545-b8b5-4e776b70e6c0-kube-api-access-4lgss\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: 
\"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.060881 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.070536 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.084813 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lgss\" (UniqueName: \"kubernetes.io/projected/366b3f96-580c-4545-b8b5-4e776b70e6c0-kube-api-access-4lgss\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.113477 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:14:47 crc kubenswrapper[4611]: I0929 13:14:47.778792 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z"] Sep 29 13:14:48 crc kubenswrapper[4611]: I0929 13:14:48.735244 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" event={"ID":"366b3f96-580c-4545-b8b5-4e776b70e6c0","Type":"ContainerStarted","Data":"e5a6fda6390d8e7e083d81284a0aa92e3f0d334fbca488e954642bff8f927c46"} Sep 29 13:14:48 crc kubenswrapper[4611]: I0929 13:14:48.735790 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" event={"ID":"366b3f96-580c-4545-b8b5-4e776b70e6c0","Type":"ContainerStarted","Data":"f9373af40c749ff00b8def47dcf9fc476b5ff5e90f135c08aa8b04eb232a84c2"} Sep 29 13:14:48 crc kubenswrapper[4611]: I0929 13:14:48.771726 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" podStartSLOduration=2.331036144 podStartE2EDuration="2.771691808s" podCreationTimestamp="2025-09-29 13:14:46 +0000 UTC" firstStartedPulling="2025-09-29 13:14:47.782543144 +0000 UTC m=+2074.674062760" lastFinishedPulling="2025-09-29 13:14:48.223198808 +0000 UTC m=+2075.114718424" observedRunningTime="2025-09-29 13:14:48.759399093 +0000 UTC m=+2075.650918689" watchObservedRunningTime="2025-09-29 13:14:48.771691808 +0000 UTC m=+2075.663211424" Sep 29 13:14:54 crc kubenswrapper[4611]: I0929 13:14:54.729754 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:54 crc kubenswrapper[4611]: I0929 13:14:54.792265 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r8gr9"] Sep 29 13:14:54 crc kubenswrapper[4611]: I0929 13:14:54.792513 4611 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r8gr9" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="registry-server" containerID="cri-o://a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba" gracePeriod=2 Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.280433 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.372004 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-utilities\") pod \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.372087 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-catalog-content\") pod \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.372129 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6w7q\" (UniqueName: \"kubernetes.io/projected/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-kube-api-access-l6w7q\") pod \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\" (UID: \"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3\") " Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.372971 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-utilities" (OuterVolumeSpecName: "utilities") pod "80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" (UID: "80fcd6aa-007f-4b6d-8c74-d347e46fe9b3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.378610 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-kube-api-access-l6w7q" (OuterVolumeSpecName: "kube-api-access-l6w7q") pod "80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" (UID: "80fcd6aa-007f-4b6d-8c74-d347e46fe9b3"). InnerVolumeSpecName "kube-api-access-l6w7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.422106 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" (UID: "80fcd6aa-007f-4b6d-8c74-d347e46fe9b3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.474587 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.474669 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6w7q\" (UniqueName: \"kubernetes.io/projected/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-kube-api-access-l6w7q\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.474687 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.806106 4611 generic.go:334] "Generic (PLEG): container finished" podID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerID="a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba" exitCode=0 Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.806202 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerDied","Data":"a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba"} Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.806224 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r8gr9" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.806307 4611 scope.go:117] "RemoveContainer" containerID="a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.806291 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8gr9" event={"ID":"80fcd6aa-007f-4b6d-8c74-d347e46fe9b3","Type":"ContainerDied","Data":"720cd08926850b39c6f9e7c8be257626cd297122efa19f53500bcd4cdadf21a0"} Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.845991 4611 scope.go:117] "RemoveContainer" containerID="9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.847749 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r8gr9"] Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.864928 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r8gr9"] Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.877147 4611 scope.go:117] "RemoveContainer" containerID="307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.912613 4611 scope.go:117] "RemoveContainer" containerID="a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba" Sep 29 13:14:55 crc kubenswrapper[4611]: E0929 13:14:55.913069 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba\": container with ID starting with a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba not found: ID does not exist" containerID="a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.913103 
4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba"} err="failed to get container status \"a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba\": rpc error: code = NotFound desc = could not find container \"a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba\": container with ID starting with a6853d44376b028ccf16b18e7e4481481e190f196614418dd3a330fda127a1ba not found: ID does not exist" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.913131 4611 scope.go:117] "RemoveContainer" containerID="9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f" Sep 29 13:14:55 crc kubenswrapper[4611]: E0929 13:14:55.913682 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f\": container with ID starting with 9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f not found: ID does not exist" containerID="9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.913728 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f"} err="failed to get container status \"9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f\": rpc error: code = NotFound desc = could not find container \"9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f\": container with ID starting with 9b6b6ba78f9633093e2a5b14aa2296205ed15f43adeb8b73ceb460f8d4eb6f8f not found: ID does not exist" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.913758 4611 scope.go:117] "RemoveContainer" containerID="307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d" Sep 29 13:14:55 crc kubenswrapper[4611]: E0929 13:14:55.914195 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d\": container with ID starting with 307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d not found: ID does not exist" containerID="307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d" Sep 29 13:14:55 crc kubenswrapper[4611]: I0929 13:14:55.914217 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d"} err="failed to get container status \"307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d\": rpc error: code = NotFound desc = could not find container \"307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d\": container with ID starting with 307415c14ab10a430e2fd1e4e2121d711fbf230a5f11b6a9082bf86ed3ca725d not found: ID does not exist" Sep 29 13:14:57 crc kubenswrapper[4611]: I0929 13:14:57.753287 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" path="/var/lib/kubelet/pods/80fcd6aa-007f-4b6d-8c74-d347e46fe9b3/volumes" Sep 29 13:14:58 crc kubenswrapper[4611]: I0929 13:14:58.844001 4611 generic.go:334] "Generic (PLEG): container finished" podID="366b3f96-580c-4545-b8b5-4e776b70e6c0" containerID="e5a6fda6390d8e7e083d81284a0aa92e3f0d334fbca488e954642bff8f927c46" exitCode=0 Sep 29 13:14:58 crc kubenswrapper[4611]: 
I0929 13:14:58.844051 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" event={"ID":"366b3f96-580c-4545-b8b5-4e776b70e6c0","Type":"ContainerDied","Data":"e5a6fda6390d8e7e083d81284a0aa92e3f0d334fbca488e954642bff8f927c46"} Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.154753 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz"] Sep 29 13:15:00 crc kubenswrapper[4611]: E0929 13:15:00.155663 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="extract-utilities" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.155678 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="extract-utilities" Sep 29 13:15:00 crc kubenswrapper[4611]: E0929 13:15:00.155708 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="registry-server" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.155716 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="registry-server" Sep 29 13:15:00 crc kubenswrapper[4611]: E0929 13:15:00.155728 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="extract-content" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.155733 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="extract-content" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.155967 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="80fcd6aa-007f-4b6d-8c74-d347e46fe9b3" containerName="registry-server" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.156974 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.166127 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.166176 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.168590 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz"] Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.288853 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf25x\" (UniqueName: \"kubernetes.io/projected/31987558-1398-4162-91c5-67884f605277-kube-api-access-tf25x\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.289706 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31987558-1398-4162-91c5-67884f605277-config-volume\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.289833 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31987558-1398-4162-91c5-67884f605277-secret-volume\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.333070 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.391687 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-ssh-key\") pod \"366b3f96-580c-4545-b8b5-4e776b70e6c0\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.391806 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-inventory\") pod \"366b3f96-580c-4545-b8b5-4e776b70e6c0\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.392026 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lgss\" (UniqueName: \"kubernetes.io/projected/366b3f96-580c-4545-b8b5-4e776b70e6c0-kube-api-access-4lgss\") pod \"366b3f96-580c-4545-b8b5-4e776b70e6c0\" (UID: \"366b3f96-580c-4545-b8b5-4e776b70e6c0\") " Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.392528 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31987558-1398-4162-91c5-67884f605277-config-volume\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.392573 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31987558-1398-4162-91c5-67884f605277-secret-volume\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.392654 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf25x\" (UniqueName: \"kubernetes.io/projected/31987558-1398-4162-91c5-67884f605277-kube-api-access-tf25x\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.393805 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31987558-1398-4162-91c5-67884f605277-config-volume\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.411912 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31987558-1398-4162-91c5-67884f605277-secret-volume\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.412687 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/366b3f96-580c-4545-b8b5-4e776b70e6c0-kube-api-access-4lgss" (OuterVolumeSpecName: "kube-api-access-4lgss") pod "366b3f96-580c-4545-b8b5-4e776b70e6c0" (UID: 
"366b3f96-580c-4545-b8b5-4e776b70e6c0"). InnerVolumeSpecName "kube-api-access-4lgss". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.415692 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf25x\" (UniqueName: \"kubernetes.io/projected/31987558-1398-4162-91c5-67884f605277-kube-api-access-tf25x\") pod \"collect-profiles-29319195-kwcgz\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.423148 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "366b3f96-580c-4545-b8b5-4e776b70e6c0" (UID: "366b3f96-580c-4545-b8b5-4e776b70e6c0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.436773 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-inventory" (OuterVolumeSpecName: "inventory") pod "366b3f96-580c-4545-b8b5-4e776b70e6c0" (UID: "366b3f96-580c-4545-b8b5-4e776b70e6c0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.487871 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.494238 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lgss\" (UniqueName: \"kubernetes.io/projected/366b3f96-580c-4545-b8b5-4e776b70e6c0-kube-api-access-4lgss\") on node \"crc\" DevicePath \"\"" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.494263 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.494272 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/366b3f96-580c-4545-b8b5-4e776b70e6c0-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.865440 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" event={"ID":"366b3f96-580c-4545-b8b5-4e776b70e6c0","Type":"ContainerDied","Data":"f9373af40c749ff00b8def47dcf9fc476b5ff5e90f135c08aa8b04eb232a84c2"} Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.865829 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9373af40c749ff00b8def47dcf9fc476b5ff5e90f135c08aa8b04eb232a84c2" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.865537 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.959835 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v"] Sep 29 13:15:00 crc kubenswrapper[4611]: E0929 13:15:00.960260 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="366b3f96-580c-4545-b8b5-4e776b70e6c0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.960275 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="366b3f96-580c-4545-b8b5-4e776b70e6c0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.960493 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="366b3f96-580c-4545-b8b5-4e776b70e6c0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.961230 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.962971 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.968755 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.968881 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.968965 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.969085 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.969191 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.969216 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.968763 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:15:00 crc kubenswrapper[4611]: I0929 13:15:00.989802 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v"] Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004567 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004670 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004755 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9tq9\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-kube-api-access-b9tq9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004775 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004854 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004905 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.004943 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005039 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005069 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: 
\"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005121 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005182 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005226 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005246 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.005288 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: W0929 13:15:01.030151 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31987558_1398_4162_91c5_67884f605277.slice/crio-cd3450ef44f628bb53fd4f8efb16ea03c264c8737b11fed6a7450216ff1de901 WatchSource:0}: Error finding container cd3450ef44f628bb53fd4f8efb16ea03c264c8737b11fed6a7450216ff1de901: Status 404 returned error can't find the container with id cd3450ef44f628bb53fd4f8efb16ea03c264c8737b11fed6a7450216ff1de901 Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.042150 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz"] Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106716 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106771 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106806 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106845 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106862 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106881 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106902 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106947 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.106971 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.107010 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9tq9\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-kube-api-access-b9tq9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.107028 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.107079 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.107096 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.107137 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.115767 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.116062 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.116120 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.116558 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.117504 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.117990 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.118270 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.119256 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.119797 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 
13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.119958 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.120662 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.124509 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.124918 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.126876 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9tq9\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-kube-api-access-b9tq9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.287970 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.876269 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" event={"ID":"31987558-1398-4162-91c5-67884f605277","Type":"ContainerStarted","Data":"5d853325eb8fcc2ce2294563a1851bc5d83eceda582b0c667b627ba37f74e121"} Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.876762 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" event={"ID":"31987558-1398-4162-91c5-67884f605277","Type":"ContainerStarted","Data":"cd3450ef44f628bb53fd4f8efb16ea03c264c8737b11fed6a7450216ff1de901"} Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.936879 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" podStartSLOduration=1.936856033 podStartE2EDuration="1.936856033s" podCreationTimestamp="2025-09-29 13:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:15:01.907218637 +0000 UTC m=+2088.798738243" watchObservedRunningTime="2025-09-29 13:15:01.936856033 +0000 UTC m=+2088.828375639" Sep 29 13:15:01 crc kubenswrapper[4611]: I0929 13:15:01.940715 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v"] Sep 29 13:15:01 crc kubenswrapper[4611]: W0929 13:15:01.948705 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e06e443_7ad2_4078_9023_3605912748c6.slice/crio-3e7fca91d3b4a0c66539cc1ebcbea5878c2d9cc069ee3130fa53dda1d3341729 WatchSource:0}: Error finding container 3e7fca91d3b4a0c66539cc1ebcbea5878c2d9cc069ee3130fa53dda1d3341729: Status 404 returned error can't find the container with id 3e7fca91d3b4a0c66539cc1ebcbea5878c2d9cc069ee3130fa53dda1d3341729 Sep 29 13:15:02 crc kubenswrapper[4611]: I0929 13:15:02.891772 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" event={"ID":"3e06e443-7ad2-4078-9023-3605912748c6","Type":"ContainerStarted","Data":"3e7fca91d3b4a0c66539cc1ebcbea5878c2d9cc069ee3130fa53dda1d3341729"} Sep 29 13:15:02 crc kubenswrapper[4611]: I0929 13:15:02.895271 4611 generic.go:334] "Generic (PLEG): container finished" podID="31987558-1398-4162-91c5-67884f605277" containerID="5d853325eb8fcc2ce2294563a1851bc5d83eceda582b0c667b627ba37f74e121" exitCode=0 Sep 29 13:15:02 crc kubenswrapper[4611]: I0929 13:15:02.895321 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" event={"ID":"31987558-1398-4162-91c5-67884f605277","Type":"ContainerDied","Data":"5d853325eb8fcc2ce2294563a1851bc5d83eceda582b0c667b627ba37f74e121"} Sep 29 13:15:03 crc kubenswrapper[4611]: I0929 13:15:03.905473 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" event={"ID":"3e06e443-7ad2-4078-9023-3605912748c6","Type":"ContainerStarted","Data":"95540e1569759e80cc22ce3c1657b23a660626f6462783f27e4d6de29848e114"} Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.250303 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.267870 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" podStartSLOduration=3.393140376 podStartE2EDuration="4.267841313s" podCreationTimestamp="2025-09-29 13:15:00 +0000 UTC" firstStartedPulling="2025-09-29 13:15:01.951776935 +0000 UTC m=+2088.843296541" lastFinishedPulling="2025-09-29 13:15:02.826477872 +0000 UTC m=+2089.717997478" observedRunningTime="2025-09-29 13:15:03.934028587 +0000 UTC m=+2090.825548203" watchObservedRunningTime="2025-09-29 13:15:04.267841313 +0000 UTC m=+2091.159360919" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.388268 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf25x\" (UniqueName: \"kubernetes.io/projected/31987558-1398-4162-91c5-67884f605277-kube-api-access-tf25x\") pod \"31987558-1398-4162-91c5-67884f605277\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.388333 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31987558-1398-4162-91c5-67884f605277-secret-volume\") pod \"31987558-1398-4162-91c5-67884f605277\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.388614 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31987558-1398-4162-91c5-67884f605277-config-volume\") pod \"31987558-1398-4162-91c5-67884f605277\" (UID: \"31987558-1398-4162-91c5-67884f605277\") " Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.389758 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31987558-1398-4162-91c5-67884f605277-config-volume" (OuterVolumeSpecName: "config-volume") pod "31987558-1398-4162-91c5-67884f605277" (UID: "31987558-1398-4162-91c5-67884f605277"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.395717 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31987558-1398-4162-91c5-67884f605277-kube-api-access-tf25x" (OuterVolumeSpecName: "kube-api-access-tf25x") pod "31987558-1398-4162-91c5-67884f605277" (UID: "31987558-1398-4162-91c5-67884f605277"). InnerVolumeSpecName "kube-api-access-tf25x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.396022 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31987558-1398-4162-91c5-67884f605277-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "31987558-1398-4162-91c5-67884f605277" (UID: "31987558-1398-4162-91c5-67884f605277"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.491244 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31987558-1398-4162-91c5-67884f605277-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.491285 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf25x\" (UniqueName: \"kubernetes.io/projected/31987558-1398-4162-91c5-67884f605277-kube-api-access-tf25x\") on node \"crc\" DevicePath \"\"" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.491303 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31987558-1398-4162-91c5-67884f605277-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.629002 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.629069 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.915891 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.915886 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz" event={"ID":"31987558-1398-4162-91c5-67884f605277","Type":"ContainerDied","Data":"cd3450ef44f628bb53fd4f8efb16ea03c264c8737b11fed6a7450216ff1de901"} Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.915969 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd3450ef44f628bb53fd4f8efb16ea03c264c8737b11fed6a7450216ff1de901" Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.987897 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"] Sep 29 13:15:04 crc kubenswrapper[4611]: I0929 13:15:04.995985 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319150-w8dd5"] Sep 29 13:15:05 crc kubenswrapper[4611]: I0929 13:15:05.747689 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d33d2c15-5789-4553-b00c-b9c2d5f332a2" path="/var/lib/kubelet/pods/d33d2c15-5789-4553-b00c-b9c2d5f332a2/volumes" Sep 29 13:15:34 crc kubenswrapper[4611]: I0929 13:15:34.628825 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:15:34 crc kubenswrapper[4611]: I0929 13:15:34.629687 4611 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:15:34 crc kubenswrapper[4611]: I0929 13:15:34.629765 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:15:34 crc kubenswrapper[4611]: I0929 13:15:34.630828 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"07a8b1ce6659f69a7f8378fefa7226292ee4c44508f137640082c9911fcec9a1"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:15:34 crc kubenswrapper[4611]: I0929 13:15:34.630898 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://07a8b1ce6659f69a7f8378fefa7226292ee4c44508f137640082c9911fcec9a1" gracePeriod=600 Sep 29 13:15:35 crc kubenswrapper[4611]: I0929 13:15:35.228072 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="07a8b1ce6659f69a7f8378fefa7226292ee4c44508f137640082c9911fcec9a1" exitCode=0 Sep 29 13:15:35 crc kubenswrapper[4611]: I0929 13:15:35.228692 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"07a8b1ce6659f69a7f8378fefa7226292ee4c44508f137640082c9911fcec9a1"} Sep 29 13:15:35 crc kubenswrapper[4611]: I0929 13:15:35.228733 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"} Sep 29 13:15:35 crc kubenswrapper[4611]: I0929 13:15:35.228756 4611 scope.go:117] "RemoveContainer" containerID="992ddebae335d566473de6cb65efe6783c090710d787df70431348d27c9704dc" Sep 29 13:15:36 crc kubenswrapper[4611]: I0929 13:15:36.564206 4611 scope.go:117] "RemoveContainer" containerID="bed35ece5d9606afb6f9809c3ccdeee8c237f5a0d00e9c88099f85181575351c" Sep 29 13:15:43 crc kubenswrapper[4611]: I0929 13:15:43.322873 4611 generic.go:334] "Generic (PLEG): container finished" podID="3e06e443-7ad2-4078-9023-3605912748c6" containerID="95540e1569759e80cc22ce3c1657b23a660626f6462783f27e4d6de29848e114" exitCode=0 Sep 29 13:15:43 crc kubenswrapper[4611]: I0929 13:15:43.322928 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" event={"ID":"3e06e443-7ad2-4078-9023-3605912748c6","Type":"ContainerDied","Data":"95540e1569759e80cc22ce3c1657b23a660626f6462783f27e4d6de29848e114"} Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.839276 4611 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.970703 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-neutron-metadata-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.970787 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.970856 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ovn-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.970940 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-libvirt-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971095 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9tq9\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-kube-api-access-b9tq9\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971150 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-repo-setup-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971190 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-nova-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971239 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971291 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971349 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-telemetry-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971479 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-inventory\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971507 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-bootstrap-combined-ca-bundle\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971532 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ssh-key\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.971585 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"3e06e443-7ad2-4078-9023-3605912748c6\" (UID: \"3e06e443-7ad2-4078-9023-3605912748c6\") "
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.979040 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.981159 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.981948 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.982098 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-kube-api-access-b9tq9" (OuterVolumeSpecName: "kube-api-access-b9tq9") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "kube-api-access-b9tq9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.982518 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.984112 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.985310 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.986223 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.988070 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.988557 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.990943 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:44 crc kubenswrapper[4611]: I0929 13:15:44.991496 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.011848 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-inventory" (OuterVolumeSpecName: "inventory") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.021775 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3e06e443-7ad2-4078-9023-3605912748c6" (UID: "3e06e443-7ad2-4078-9023-3605912748c6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074368 4611 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074409 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9tq9\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-kube-api-access-b9tq9\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074422 4611 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074436 4611 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074452 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074463 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074474 4611 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074485 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074496 4611 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074508 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074521 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074537 4611 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074554 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e06e443-7ad2-4078-9023-3605912748c6-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.074569 4611 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e06e443-7ad2-4078-9023-3605912748c6-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.351317 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v" event={"ID":"3e06e443-7ad2-4078-9023-3605912748c6","Type":"ContainerDied","Data":"3e7fca91d3b4a0c66539cc1ebcbea5878c2d9cc069ee3130fa53dda1d3341729"}
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.351375 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e7fca91d3b4a0c66539cc1ebcbea5878c2d9cc069ee3130fa53dda1d3341729"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.351460 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.501640 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"]
Sep 29 13:15:45 crc kubenswrapper[4611]: E0929 13:15:45.502673 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31987558-1398-4162-91c5-67884f605277" containerName="collect-profiles"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.502693 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="31987558-1398-4162-91c5-67884f605277" containerName="collect-profiles"
Sep 29 13:15:45 crc kubenswrapper[4611]: E0929 13:15:45.502768 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e06e443-7ad2-4078-9023-3605912748c6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.502780 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e06e443-7ad2-4078-9023-3605912748c6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.503041 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e06e443-7ad2-4078-9023-3605912748c6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.503070 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="31987558-1398-4162-91c5-67884f605277" containerName="collect-profiles"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.504116 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.506359 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.506788 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.507057 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.507208 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.507351 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.513686 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"]
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.587060 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.587579 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.587670 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.588048 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.588297 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrzmt\" (UniqueName: \"kubernetes.io/projected/a58bbd72-3a85-4f21-9d48-d7bfcc527310-kube-api-access-jrzmt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.690758 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.690954 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.691029 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.691125 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.691211 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrzmt\" (UniqueName: \"kubernetes.io/projected/a58bbd72-3a85-4f21-9d48-d7bfcc527310-kube-api-access-jrzmt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.693457 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.696244 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.696387 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.696945 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.711646 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrzmt\" (UniqueName: \"kubernetes.io/projected/a58bbd72-3a85-4f21-9d48-d7bfcc527310-kube-api-access-jrzmt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-prd7l\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:45 crc kubenswrapper[4611]: I0929 13:15:45.824107 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:15:46 crc kubenswrapper[4611]: I0929 13:15:46.445410 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"]
Sep 29 13:15:47 crc kubenswrapper[4611]: I0929 13:15:47.373696 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l" event={"ID":"a58bbd72-3a85-4f21-9d48-d7bfcc527310","Type":"ContainerStarted","Data":"ef3413fa6ef4ad10133c5944986c489426479acda4252fa654be261c63d943c4"}
Sep 29 13:15:47 crc kubenswrapper[4611]: I0929 13:15:47.374734 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l" event={"ID":"a58bbd72-3a85-4f21-9d48-d7bfcc527310","Type":"ContainerStarted","Data":"72efdde3260fe37c076f5b20f083a166d89dbbc7046dd4445b95351dc4601b2d"}
Sep 29 13:15:47 crc kubenswrapper[4611]: I0929 13:15:47.400405 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l" podStartSLOduration=1.920429082 podStartE2EDuration="2.40037227s" podCreationTimestamp="2025-09-29 13:15:45 +0000 UTC" firstStartedPulling="2025-09-29 13:15:46.491018263 +0000 UTC m=+2133.382537869" lastFinishedPulling="2025-09-29 13:15:46.970961451 +0000 UTC m=+2133.862481057" observedRunningTime="2025-09-29 13:15:47.393396999 +0000 UTC m=+2134.284916625" watchObservedRunningTime="2025-09-29 13:15:47.40037227 +0000 UTC m=+2134.291891876"
Sep 29 13:16:54 crc kubenswrapper[4611]: I0929 13:16:54.072556 4611 generic.go:334] "Generic (PLEG): container finished" podID="a58bbd72-3a85-4f21-9d48-d7bfcc527310" containerID="ef3413fa6ef4ad10133c5944986c489426479acda4252fa654be261c63d943c4" exitCode=0
Sep 29 13:16:54 crc kubenswrapper[4611]: I0929 13:16:54.072689 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l" event={"ID":"a58bbd72-3a85-4f21-9d48-d7bfcc527310","Type":"ContainerDied","Data":"ef3413fa6ef4ad10133c5944986c489426479acda4252fa654be261c63d943c4"}
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.622345 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.689901 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovncontroller-config-0\") pod \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") "
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.689962 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovn-combined-ca-bundle\") pod \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") "
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.690014 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrzmt\" (UniqueName: \"kubernetes.io/projected/a58bbd72-3a85-4f21-9d48-d7bfcc527310-kube-api-access-jrzmt\") pod \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") "
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.690052 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-inventory\") pod \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") "
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.690088 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ssh-key\") pod \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\" (UID: \"a58bbd72-3a85-4f21-9d48-d7bfcc527310\") "
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.696542 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a58bbd72-3a85-4f21-9d48-d7bfcc527310-kube-api-access-jrzmt" (OuterVolumeSpecName: "kube-api-access-jrzmt") pod "a58bbd72-3a85-4f21-9d48-d7bfcc527310" (UID: "a58bbd72-3a85-4f21-9d48-d7bfcc527310"). InnerVolumeSpecName "kube-api-access-jrzmt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.716510 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a58bbd72-3a85-4f21-9d48-d7bfcc527310" (UID: "a58bbd72-3a85-4f21-9d48-d7bfcc527310"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.716986 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "a58bbd72-3a85-4f21-9d48-d7bfcc527310" (UID: "a58bbd72-3a85-4f21-9d48-d7bfcc527310"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.719980 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a58bbd72-3a85-4f21-9d48-d7bfcc527310" (UID: "a58bbd72-3a85-4f21-9d48-d7bfcc527310"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.722305 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-inventory" (OuterVolumeSpecName: "inventory") pod "a58bbd72-3a85-4f21-9d48-d7bfcc527310" (UID: "a58bbd72-3a85-4f21-9d48-d7bfcc527310"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.791823 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.791987 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.792180 4611 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.792289 4611 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a58bbd72-3a85-4f21-9d48-d7bfcc527310-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:16:55 crc kubenswrapper[4611]: I0929 13:16:55.792351 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrzmt\" (UniqueName: \"kubernetes.io/projected/a58bbd72-3a85-4f21-9d48-d7bfcc527310-kube-api-access-jrzmt\") on node \"crc\" DevicePath \"\""
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.099455 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l" event={"ID":"a58bbd72-3a85-4f21-9d48-d7bfcc527310","Type":"ContainerDied","Data":"72efdde3260fe37c076f5b20f083a166d89dbbc7046dd4445b95351dc4601b2d"}
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.099500 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72efdde3260fe37c076f5b20f083a166d89dbbc7046dd4445b95351dc4601b2d"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.099596 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-prd7l"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.274470 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"]
Sep 29 13:16:56 crc kubenswrapper[4611]: E0929 13:16:56.274904 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a58bbd72-3a85-4f21-9d48-d7bfcc527310" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.274924 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a58bbd72-3a85-4f21-9d48-d7bfcc527310" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.275138 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a58bbd72-3a85-4f21-9d48-d7bfcc527310" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.275925 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.279328 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.279680 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.282068 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.282163 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.282377 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.287796 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.297337 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"]
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.305170 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.305229 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.305262 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.305292 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgs6v\" (UniqueName: \"kubernetes.io/projected/42a9b4b7-e479-4c75-9713-d80f50ff45d8-kube-api-access-rgs6v\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.305327 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.305503 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.406894 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.406984 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.407027 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.407049 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.407071 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgs6v\" (UniqueName: \"kubernetes.io/projected/42a9b4b7-e479-4c75-9713-d80f50ff45d8-kube-api-access-rgs6v\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.407096 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.410754 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.411474 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.412441 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.413476 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.415817 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.427076 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgs6v\" (UniqueName: \"kubernetes.io/projected/42a9b4b7-e479-4c75-9713-d80f50ff45d8-kube-api-access-rgs6v\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:56 crc kubenswrapper[4611]: I0929 13:16:56.594059 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:16:57 crc kubenswrapper[4611]: W0929 13:16:57.196883 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42a9b4b7_e479_4c75_9713_d80f50ff45d8.slice/crio-08586f377c733aca6e94e6f4093cda379e9286e90d3ec6b15cddb3ddda52132f WatchSource:0}: Error finding container 08586f377c733aca6e94e6f4093cda379e9286e90d3ec6b15cddb3ddda52132f: Status 404 returned error can't find the container with id 08586f377c733aca6e94e6f4093cda379e9286e90d3ec6b15cddb3ddda52132f
Sep 29 13:16:57 crc kubenswrapper[4611]: I0929 13:16:57.206488 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"]
Sep 29 13:16:58 crc kubenswrapper[4611]: I0929 13:16:58.124997 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h" event={"ID":"42a9b4b7-e479-4c75-9713-d80f50ff45d8","Type":"ContainerStarted","Data":"b1740717dca64d53aa102e6e15945bcfdb16fb525a1ef7852729a585aed22c7f"}
Sep 29 13:16:58 crc kubenswrapper[4611]: I0929 13:16:58.125399 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h" event={"ID":"42a9b4b7-e479-4c75-9713-d80f50ff45d8","Type":"ContainerStarted","Data":"08586f377c733aca6e94e6f4093cda379e9286e90d3ec6b15cddb3ddda52132f"}
Sep 29 13:16:58 crc kubenswrapper[4611]: I0929 13:16:58.151453 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h" podStartSLOduration=1.694753366 podStartE2EDuration="2.151423883s" podCreationTimestamp="2025-09-29 13:16:56 +0000 UTC" firstStartedPulling="2025-09-29 13:16:57.19971876 +0000 UTC m=+2204.091238386" lastFinishedPulling="2025-09-29 13:16:57.656389297 +0000 UTC m=+2204.547908903" observedRunningTime="2025-09-29 13:16:58.141955429 +0000 UTC m=+2205.033475065" watchObservedRunningTime="2025-09-29 13:16:58.151423883 +0000 UTC m=+2205.042943489"
Sep 29 13:17:48 crc kubenswrapper[4611]: I0929 13:17:48.635436 4611 generic.go:334] "Generic (PLEG): container finished" podID="42a9b4b7-e479-4c75-9713-d80f50ff45d8" containerID="b1740717dca64d53aa102e6e15945bcfdb16fb525a1ef7852729a585aed22c7f" exitCode=0
Sep 29 13:17:48 crc kubenswrapper[4611]: I0929 13:17:48.635497 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h" event={"ID":"42a9b4b7-e479-4c75-9713-d80f50ff45d8","Type":"ContainerDied","Data":"b1740717dca64d53aa102e6e15945bcfdb16fb525a1ef7852729a585aed22c7f"}
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.286892 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h"
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.434285 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-inventory\") pod \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") "
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.434445 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-metadata-combined-ca-bundle\") pod \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") "
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.434574 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-nova-metadata-neutron-config-0\") pod \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") "
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.434648 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") "
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.434675 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-ssh-key\") pod \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") "
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.434753 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgs6v\" (UniqueName: \"kubernetes.io/projected/42a9b4b7-e479-4c75-9713-d80f50ff45d8-kube-api-access-rgs6v\") pod \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\" (UID: \"42a9b4b7-e479-4c75-9713-d80f50ff45d8\") "
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.440832 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "42a9b4b7-e479-4c75-9713-d80f50ff45d8" (UID: "42a9b4b7-e479-4c75-9713-d80f50ff45d8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.441232 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42a9b4b7-e479-4c75-9713-d80f50ff45d8-kube-api-access-rgs6v" (OuterVolumeSpecName: "kube-api-access-rgs6v") pod "42a9b4b7-e479-4c75-9713-d80f50ff45d8" (UID: "42a9b4b7-e479-4c75-9713-d80f50ff45d8"). InnerVolumeSpecName "kube-api-access-rgs6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.467989 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "42a9b4b7-e479-4c75-9713-d80f50ff45d8" (UID: "42a9b4b7-e479-4c75-9713-d80f50ff45d8"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.468759 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "42a9b4b7-e479-4c75-9713-d80f50ff45d8" (UID: "42a9b4b7-e479-4c75-9713-d80f50ff45d8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.469821 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-inventory" (OuterVolumeSpecName: "inventory") pod "42a9b4b7-e479-4c75-9713-d80f50ff45d8" (UID: "42a9b4b7-e479-4c75-9713-d80f50ff45d8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.473357 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "42a9b4b7-e479-4c75-9713-d80f50ff45d8" (UID: "42a9b4b7-e479-4c75-9713-d80f50ff45d8"). InnerVolumeSpecName "nova-metadata-neutron-config-0".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.537581 4611 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.537640 4611 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.537654 4611 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.537665 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.537676 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgs6v\" (UniqueName: \"kubernetes.io/projected/42a9b4b7-e479-4c75-9713-d80f50ff45d8-kube-api-access-rgs6v\") on node \"crc\" DevicePath \"\"" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.537687 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42a9b4b7-e479-4c75-9713-d80f50ff45d8-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.655812 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h" event={"ID":"42a9b4b7-e479-4c75-9713-d80f50ff45d8","Type":"ContainerDied","Data":"08586f377c733aca6e94e6f4093cda379e9286e90d3ec6b15cddb3ddda52132f"} Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.655856 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08586f377c733aca6e94e6f4093cda379e9286e90d3ec6b15cddb3ddda52132f" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.655918 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.758033 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx"] Sep 29 13:17:50 crc kubenswrapper[4611]: E0929 13:17:50.758539 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42a9b4b7-e479-4c75-9713-d80f50ff45d8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.758568 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="42a9b4b7-e479-4c75-9713-d80f50ff45d8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.758875 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="42a9b4b7-e479-4c75-9713-d80f50ff45d8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.759821 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.768150 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.768150 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.768216 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.768698 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.768808 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.787036 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx"] Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.945965 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.946567 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.946593 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jtkh\" (UniqueName: \"kubernetes.io/projected/678b329e-0ba8-4901-94e3-51738d9317c0-kube-api-access-5jtkh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.946813 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:50 crc kubenswrapper[4611]: I0929 13:17:50.947183 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.050199 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.050270 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.050295 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jtkh\" (UniqueName: \"kubernetes.io/projected/678b329e-0ba8-4901-94e3-51738d9317c0-kube-api-access-5jtkh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.050319 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.050384 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.054540 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.055153 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.055812 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.056683 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.070345 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jtkh\" (UniqueName: \"kubernetes.io/projected/678b329e-0ba8-4901-94e3-51738d9317c0-kube-api-access-5jtkh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-rhthx\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.080067 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:17:51 crc kubenswrapper[4611]: I0929 13:17:51.651401 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx"] Sep 29 13:17:52 crc kubenswrapper[4611]: I0929 13:17:52.678386 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" event={"ID":"678b329e-0ba8-4901-94e3-51738d9317c0","Type":"ContainerStarted","Data":"8d55342082266f720dbbce13ced879d995d5b6122db0b3b834a749cc503bd744"} Sep 29 13:17:52 crc kubenswrapper[4611]: I0929 13:17:52.679046 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" event={"ID":"678b329e-0ba8-4901-94e3-51738d9317c0","Type":"ContainerStarted","Data":"8a6fd3bc1aede466f7f458f0dd2d32b5bc79e2f6f4c6b77398d687b9bf4594bd"} Sep 29 13:17:52 crc kubenswrapper[4611]: I0929 13:17:52.703197 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" podStartSLOduration=1.9901240009999999 podStartE2EDuration="2.703172436s" podCreationTimestamp="2025-09-29 13:17:50 +0000 UTC" firstStartedPulling="2025-09-29 13:17:51.665410857 +0000 UTC m=+2258.556930463" lastFinishedPulling="2025-09-29 13:17:52.378459292 +0000 UTC m=+2259.269978898" observedRunningTime="2025-09-29 13:17:52.701975931 +0000 UTC m=+2259.593495567" watchObservedRunningTime="2025-09-29 13:17:52.703172436 +0000 UTC m=+2259.594692042" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.615295 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wcct5"] Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.618411 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.632542 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcct5"] Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.756075 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwvdx\" (UniqueName: \"kubernetes.io/projected/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-kube-api-access-vwvdx\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.756262 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-catalog-content\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.756320 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-utilities\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.858108 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-catalog-content\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.858528 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-utilities\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.858746 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwvdx\" (UniqueName: \"kubernetes.io/projected/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-kube-api-access-vwvdx\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.859267 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-catalog-content\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.859738 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-utilities\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.891023 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vwvdx\" (UniqueName: \"kubernetes.io/projected/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-kube-api-access-vwvdx\") pod \"redhat-marketplace-wcct5\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:17:59 crc kubenswrapper[4611]: I0929 13:17:59.943021 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:00 crc kubenswrapper[4611]: I0929 13:18:00.500004 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcct5"] Sep 29 13:18:00 crc kubenswrapper[4611]: I0929 13:18:00.796761 4611 generic.go:334] "Generic (PLEG): container finished" podID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerID="8eab6350a162b81a6ebdca51b4dd15a2e8aded873a340d837985ec201105a27b" exitCode=0 Sep 29 13:18:00 crc kubenswrapper[4611]: I0929 13:18:00.796828 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerDied","Data":"8eab6350a162b81a6ebdca51b4dd15a2e8aded873a340d837985ec201105a27b"} Sep 29 13:18:00 crc kubenswrapper[4611]: I0929 13:18:00.797149 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerStarted","Data":"e4f2c5657c905127d863e3bcd4b52f18dd74476c2356dd4749903a31a756dcaa"} Sep 29 13:18:01 crc kubenswrapper[4611]: I0929 13:18:01.808739 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerStarted","Data":"d5ea64182e222b86d03bec8d65c4895a672db3994a8e501fa578a654ab91a511"} Sep 29 13:18:02 crc kubenswrapper[4611]: I0929 13:18:02.821556 4611 generic.go:334] "Generic (PLEG): container finished" podID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerID="d5ea64182e222b86d03bec8d65c4895a672db3994a8e501fa578a654ab91a511" exitCode=0 Sep 29 13:18:02 crc kubenswrapper[4611]: I0929 13:18:02.821985 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerDied","Data":"d5ea64182e222b86d03bec8d65c4895a672db3994a8e501fa578a654ab91a511"} Sep 29 13:18:03 crc kubenswrapper[4611]: I0929 13:18:03.836701 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerStarted","Data":"d2eeadbf3606b6732e9687acfe18b689856fcbc6386aa15c6217d1315d055db3"} Sep 29 13:18:03 crc kubenswrapper[4611]: I0929 13:18:03.858588 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wcct5" podStartSLOduration=2.380634545 podStartE2EDuration="4.858564651s" podCreationTimestamp="2025-09-29 13:17:59 +0000 UTC" firstStartedPulling="2025-09-29 13:18:00.800214652 +0000 UTC m=+2267.691734258" lastFinishedPulling="2025-09-29 13:18:03.278144758 +0000 UTC m=+2270.169664364" observedRunningTime="2025-09-29 13:18:03.85507028 +0000 UTC m=+2270.746589886" watchObservedRunningTime="2025-09-29 13:18:03.858564651 +0000 UTC m=+2270.750084247" Sep 29 13:18:04 crc kubenswrapper[4611]: I0929 13:18:04.630514 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:18:04 crc kubenswrapper[4611]: I0929 13:18:04.631145 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:18:09 crc kubenswrapper[4611]: I0929 13:18:09.944525 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:09 crc kubenswrapper[4611]: I0929 13:18:09.945237 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:09 crc kubenswrapper[4611]: I0929 13:18:09.997863 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:10 crc kubenswrapper[4611]: I0929 13:18:10.966469 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:11 crc kubenswrapper[4611]: I0929 13:18:11.029054 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcct5"] Sep 29 13:18:12 crc kubenswrapper[4611]: I0929 13:18:12.933509 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wcct5" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="registry-server" containerID="cri-o://d2eeadbf3606b6732e9687acfe18b689856fcbc6386aa15c6217d1315d055db3" gracePeriod=2 Sep 29 13:18:13 crc kubenswrapper[4611]: I0929 13:18:13.956519 4611 generic.go:334] "Generic (PLEG): container finished" podID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerID="d2eeadbf3606b6732e9687acfe18b689856fcbc6386aa15c6217d1315d055db3" exitCode=0 Sep 29 13:18:13 crc kubenswrapper[4611]: I0929 13:18:13.956596 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerDied","Data":"d2eeadbf3606b6732e9687acfe18b689856fcbc6386aa15c6217d1315d055db3"} Sep 29 13:18:13 crc kubenswrapper[4611]: I0929 13:18:13.957052 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcct5" event={"ID":"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1","Type":"ContainerDied","Data":"e4f2c5657c905127d863e3bcd4b52f18dd74476c2356dd4749903a31a756dcaa"} Sep 29 13:18:13 crc kubenswrapper[4611]: I0929 13:18:13.957076 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4f2c5657c905127d863e3bcd4b52f18dd74476c2356dd4749903a31a756dcaa" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.007711 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.105352 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwvdx\" (UniqueName: \"kubernetes.io/projected/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-kube-api-access-vwvdx\") pod \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.105709 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-catalog-content\") pod \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.105896 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-utilities\") pod \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\" (UID: \"2f072146-f8d5-4d07-9b1c-2d4fdbda0be1\") " Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.107417 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-utilities" (OuterVolumeSpecName: "utilities") pod "2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" (UID: "2f072146-f8d5-4d07-9b1c-2d4fdbda0be1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.117339 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-kube-api-access-vwvdx" (OuterVolumeSpecName: "kube-api-access-vwvdx") pod "2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" (UID: "2f072146-f8d5-4d07-9b1c-2d4fdbda0be1"). InnerVolumeSpecName "kube-api-access-vwvdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.120926 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" (UID: "2f072146-f8d5-4d07-9b1c-2d4fdbda0be1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.208917 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.208958 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.208970 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwvdx\" (UniqueName: \"kubernetes.io/projected/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1-kube-api-access-vwvdx\") on node \"crc\" DevicePath \"\"" Sep 29 13:18:14 crc kubenswrapper[4611]: I0929 13:18:14.969823 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcct5" Sep 29 13:18:15 crc kubenswrapper[4611]: I0929 13:18:15.010378 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcct5"] Sep 29 13:18:15 crc kubenswrapper[4611]: I0929 13:18:15.020425 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcct5"] Sep 29 13:18:15 crc kubenswrapper[4611]: I0929 13:18:15.750498 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" path="/var/lib/kubelet/pods/2f072146-f8d5-4d07-9b1c-2d4fdbda0be1/volumes" Sep 29 13:18:34 crc kubenswrapper[4611]: I0929 13:18:34.629132 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:18:34 crc kubenswrapper[4611]: I0929 13:18:34.630226 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:19:04 crc kubenswrapper[4611]: I0929 13:19:04.628675 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:19:04 crc kubenswrapper[4611]: I0929 13:19:04.629433 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:19:04 crc kubenswrapper[4611]: I0929 13:19:04.629500 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:19:04 crc kubenswrapper[4611]: I0929 13:19:04.630601 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:19:04 crc kubenswrapper[4611]: I0929 13:19:04.630707 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" gracePeriod=600 Sep 29 13:19:04 crc kubenswrapper[4611]: E0929 13:19:04.760386 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:19:05 crc kubenswrapper[4611]: I0929 13:19:05.540939 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" exitCode=0 Sep 29 13:19:05 crc kubenswrapper[4611]: I0929 13:19:05.541123 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"} Sep 29 13:19:05 crc kubenswrapper[4611]: I0929 13:19:05.541364 4611 scope.go:117] "RemoveContainer" containerID="07a8b1ce6659f69a7f8378fefa7226292ee4c44508f137640082c9911fcec9a1" Sep 29 13:19:05 crc kubenswrapper[4611]: I0929 13:19:05.542136 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:19:05 crc kubenswrapper[4611]: E0929 13:19:05.542509 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.584584 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-brnh8"] Sep 29 13:19:11 crc kubenswrapper[4611]: E0929 13:19:11.585860 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="registry-server" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.585882 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="registry-server" Sep 29 13:19:11 crc kubenswrapper[4611]: E0929 13:19:11.585902 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="extract-content" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.585911 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="extract-content" Sep 29 13:19:11 crc kubenswrapper[4611]: E0929 13:19:11.585934 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="extract-utilities" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.585943 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="extract-utilities" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.586227 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f072146-f8d5-4d07-9b1c-2d4fdbda0be1" containerName="registry-server" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.588131 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.601348 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4rpw\" (UniqueName: \"kubernetes.io/projected/a4f0d5ae-9746-409c-bf49-78775a999489-kube-api-access-l4rpw\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.601589 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-catalog-content\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.601761 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-utilities\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.621249 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-brnh8"] Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.703382 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4rpw\" (UniqueName: \"kubernetes.io/projected/a4f0d5ae-9746-409c-bf49-78775a999489-kube-api-access-l4rpw\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.703509 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-catalog-content\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.703551 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-utilities\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.704294 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-utilities\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.704387 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-catalog-content\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.730765 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-l4rpw\" (UniqueName: \"kubernetes.io/projected/a4f0d5ae-9746-409c-bf49-78775a999489-kube-api-access-l4rpw\") pod \"redhat-operators-brnh8\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:11 crc kubenswrapper[4611]: I0929 13:19:11.920827 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:12 crc kubenswrapper[4611]: I0929 13:19:12.463168 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-brnh8"] Sep 29 13:19:12 crc kubenswrapper[4611]: I0929 13:19:12.620932 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerStarted","Data":"0ad0f9fb9025fcf7a1da7611c64b5a0986d798c954b6bd6e951f27e2dad2720b"} Sep 29 13:19:13 crc kubenswrapper[4611]: I0929 13:19:13.633599 4611 generic.go:334] "Generic (PLEG): container finished" podID="a4f0d5ae-9746-409c-bf49-78775a999489" containerID="f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64" exitCode=0 Sep 29 13:19:13 crc kubenswrapper[4611]: I0929 13:19:13.634597 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerDied","Data":"f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64"} Sep 29 13:19:13 crc kubenswrapper[4611]: I0929 13:19:13.635854 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:19:15 crc kubenswrapper[4611]: I0929 13:19:15.663697 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerStarted","Data":"6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8"} Sep 29 13:19:17 crc kubenswrapper[4611]: I0929 13:19:17.739283 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:19:17 crc kubenswrapper[4611]: E0929 13:19:17.740317 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:19:18 crc kubenswrapper[4611]: I0929 13:19:18.994305 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nsk6k"] Sep 29 13:19:18 crc kubenswrapper[4611]: I0929 13:19:18.996850 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.017474 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wzvx\" (UniqueName: \"kubernetes.io/projected/f7af7644-6878-4835-b449-dda583f327e1-kube-api-access-7wzvx\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.018066 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-catalog-content\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.018216 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-utilities\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.029456 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nsk6k"] Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.119835 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-catalog-content\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.119937 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-utilities\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.120024 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wzvx\" (UniqueName: \"kubernetes.io/projected/f7af7644-6878-4835-b449-dda583f327e1-kube-api-access-7wzvx\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.120391 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-catalog-content\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.120956 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-utilities\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.146364 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7wzvx\" (UniqueName: \"kubernetes.io/projected/f7af7644-6878-4835-b449-dda583f327e1-kube-api-access-7wzvx\") pod \"certified-operators-nsk6k\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.322679 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.716526 4611 generic.go:334] "Generic (PLEG): container finished" podID="a4f0d5ae-9746-409c-bf49-78775a999489" containerID="6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8" exitCode=0 Sep 29 13:19:19 crc kubenswrapper[4611]: I0929 13:19:19.716650 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerDied","Data":"6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8"} Sep 29 13:19:20 crc kubenswrapper[4611]: I0929 13:19:20.200550 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nsk6k"] Sep 29 13:19:20 crc kubenswrapper[4611]: I0929 13:19:20.732700 4611 generic.go:334] "Generic (PLEG): container finished" podID="f7af7644-6878-4835-b449-dda583f327e1" containerID="c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49" exitCode=0 Sep 29 13:19:20 crc kubenswrapper[4611]: I0929 13:19:20.733231 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerDied","Data":"c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49"} Sep 29 13:19:20 crc kubenswrapper[4611]: I0929 13:19:20.733272 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerStarted","Data":"98d0c65b9ede45025358cdd76fc28cbaf186d2c57bc22d2db9fa6e97ca5ec7e9"} Sep 29 13:19:20 crc kubenswrapper[4611]: I0929 13:19:20.737957 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerStarted","Data":"5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c"} Sep 29 13:19:20 crc kubenswrapper[4611]: I0929 13:19:20.792485 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-brnh8" podStartSLOduration=3.206491736 podStartE2EDuration="9.792455325s" podCreationTimestamp="2025-09-29 13:19:11 +0000 UTC" firstStartedPulling="2025-09-29 13:19:13.635603698 +0000 UTC m=+2340.527123294" lastFinishedPulling="2025-09-29 13:19:20.221567277 +0000 UTC m=+2347.113086883" observedRunningTime="2025-09-29 13:19:20.779956004 +0000 UTC m=+2347.671475610" watchObservedRunningTime="2025-09-29 13:19:20.792455325 +0000 UTC m=+2347.683974941" Sep 29 13:19:21 crc kubenswrapper[4611]: I0929 13:19:21.921377 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:21 crc kubenswrapper[4611]: I0929 13:19:21.923695 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:22 crc kubenswrapper[4611]: I0929 13:19:22.767190 4611 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerStarted","Data":"50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57"} Sep 29 13:19:22 crc kubenswrapper[4611]: I0929 13:19:22.978018 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-brnh8" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="registry-server" probeResult="failure" output=< Sep 29 13:19:22 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:19:22 crc kubenswrapper[4611]: > Sep 29 13:19:23 crc kubenswrapper[4611]: I0929 13:19:23.779825 4611 generic.go:334] "Generic (PLEG): container finished" podID="f7af7644-6878-4835-b449-dda583f327e1" containerID="50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57" exitCode=0 Sep 29 13:19:23 crc kubenswrapper[4611]: I0929 13:19:23.779933 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerDied","Data":"50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57"} Sep 29 13:19:24 crc kubenswrapper[4611]: I0929 13:19:24.794476 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerStarted","Data":"0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12"} Sep 29 13:19:24 crc kubenswrapper[4611]: I0929 13:19:24.822552 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nsk6k" podStartSLOduration=3.325402007 podStartE2EDuration="6.822522925s" podCreationTimestamp="2025-09-29 13:19:18 +0000 UTC" firstStartedPulling="2025-09-29 13:19:20.741573285 +0000 UTC m=+2347.633092891" lastFinishedPulling="2025-09-29 13:19:24.238694203 +0000 UTC m=+2351.130213809" observedRunningTime="2025-09-29 13:19:24.815915194 +0000 UTC m=+2351.707434810" watchObservedRunningTime="2025-09-29 13:19:24.822522925 +0000 UTC m=+2351.714042531" Sep 29 13:19:29 crc kubenswrapper[4611]: I0929 13:19:29.323349 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:29 crc kubenswrapper[4611]: I0929 13:19:29.324240 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:29 crc kubenswrapper[4611]: I0929 13:19:29.382807 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:29 crc kubenswrapper[4611]: I0929 13:19:29.737362 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:19:29 crc kubenswrapper[4611]: E0929 13:19:29.738222 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:19:29 crc kubenswrapper[4611]: I0929 13:19:29.907864 4611 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:29 crc kubenswrapper[4611]: I0929 13:19:29.990494 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nsk6k"] Sep 29 13:19:31 crc kubenswrapper[4611]: I0929 13:19:31.878887 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nsk6k" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="registry-server" containerID="cri-o://0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12" gracePeriod=2 Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.367282 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.455382 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-utilities\") pod \"f7af7644-6878-4835-b449-dda583f327e1\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.455663 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wzvx\" (UniqueName: \"kubernetes.io/projected/f7af7644-6878-4835-b449-dda583f327e1-kube-api-access-7wzvx\") pod \"f7af7644-6878-4835-b449-dda583f327e1\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.455694 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-catalog-content\") pod \"f7af7644-6878-4835-b449-dda583f327e1\" (UID: \"f7af7644-6878-4835-b449-dda583f327e1\") " Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.458249 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-utilities" (OuterVolumeSpecName: "utilities") pod "f7af7644-6878-4835-b449-dda583f327e1" (UID: "f7af7644-6878-4835-b449-dda583f327e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.463265 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7af7644-6878-4835-b449-dda583f327e1-kube-api-access-7wzvx" (OuterVolumeSpecName: "kube-api-access-7wzvx") pod "f7af7644-6878-4835-b449-dda583f327e1" (UID: "f7af7644-6878-4835-b449-dda583f327e1"). InnerVolumeSpecName "kube-api-access-7wzvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.508253 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f7af7644-6878-4835-b449-dda583f327e1" (UID: "f7af7644-6878-4835-b449-dda583f327e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.558610 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.558736 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wzvx\" (UniqueName: \"kubernetes.io/projected/f7af7644-6878-4835-b449-dda583f327e1-kube-api-access-7wzvx\") on node \"crc\" DevicePath \"\"" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.558748 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7af7644-6878-4835-b449-dda583f327e1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.895935 4611 generic.go:334] "Generic (PLEG): container finished" podID="f7af7644-6878-4835-b449-dda583f327e1" containerID="0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12" exitCode=0 Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.896023 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsk6k" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.896028 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerDied","Data":"0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12"} Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.898807 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsk6k" event={"ID":"f7af7644-6878-4835-b449-dda583f327e1","Type":"ContainerDied","Data":"98d0c65b9ede45025358cdd76fc28cbaf186d2c57bc22d2db9fa6e97ca5ec7e9"} Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.898842 4611 scope.go:117] "RemoveContainer" containerID="0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.929663 4611 scope.go:117] "RemoveContainer" containerID="50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57" Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.963783 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nsk6k"] Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.974400 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nsk6k"] Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.981215 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-brnh8" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="registry-server" probeResult="failure" output=< Sep 29 13:19:32 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:19:32 crc kubenswrapper[4611]: > Sep 29 13:19:32 crc kubenswrapper[4611]: I0929 13:19:32.992178 4611 scope.go:117] "RemoveContainer" containerID="c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.034057 4611 scope.go:117] "RemoveContainer" containerID="0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12" Sep 29 13:19:33 crc kubenswrapper[4611]: E0929 13:19:33.034590 4611 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12\": container with ID starting with 0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12 not found: ID does not exist" containerID="0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.034641 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12"} err="failed to get container status \"0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12\": rpc error: code = NotFound desc = could not find container \"0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12\": container with ID starting with 0476ed8408eaf147cfb05da23f5279a8071fdbcaec1b138b7f2bd4568b582a12 not found: ID does not exist" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.034667 4611 scope.go:117] "RemoveContainer" containerID="50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57" Sep 29 13:19:33 crc kubenswrapper[4611]: E0929 13:19:33.034932 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57\": container with ID starting with 50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57 not found: ID does not exist" containerID="50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.034954 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57"} err="failed to get container status \"50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57\": rpc error: code = NotFound desc = could not find container \"50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57\": container with ID starting with 50638f22f4badfc8d3c603ee172c8c4f02a5522a370743a2898e85127e914d57 not found: ID does not exist" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.034969 4611 scope.go:117] "RemoveContainer" containerID="c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49" Sep 29 13:19:33 crc kubenswrapper[4611]: E0929 13:19:33.035235 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49\": container with ID starting with c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49 not found: ID does not exist" containerID="c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.035260 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49"} err="failed to get container status \"c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49\": rpc error: code = NotFound desc = could not find container \"c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49\": container with ID starting with c25a032ed7b3b9f7031a5b57eb11c85597719aa2425d893f6f6a87020ab17f49 not found: ID does not exist" Sep 29 13:19:33 crc kubenswrapper[4611]: I0929 13:19:33.750768 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="f7af7644-6878-4835-b449-dda583f327e1" path="/var/lib/kubelet/pods/f7af7644-6878-4835-b449-dda583f327e1/volumes" Sep 29 13:19:41 crc kubenswrapper[4611]: I0929 13:19:41.994188 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:42 crc kubenswrapper[4611]: I0929 13:19:42.082792 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:42 crc kubenswrapper[4611]: I0929 13:19:42.788872 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-brnh8"] Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.060387 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-brnh8" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="registry-server" containerID="cri-o://5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c" gracePeriod=2 Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.550932 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.691874 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-catalog-content\") pod \"a4f0d5ae-9746-409c-bf49-78775a999489\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.692133 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4rpw\" (UniqueName: \"kubernetes.io/projected/a4f0d5ae-9746-409c-bf49-78775a999489-kube-api-access-l4rpw\") pod \"a4f0d5ae-9746-409c-bf49-78775a999489\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.692239 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-utilities\") pod \"a4f0d5ae-9746-409c-bf49-78775a999489\" (UID: \"a4f0d5ae-9746-409c-bf49-78775a999489\") " Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.693409 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-utilities" (OuterVolumeSpecName: "utilities") pod "a4f0d5ae-9746-409c-bf49-78775a999489" (UID: "a4f0d5ae-9746-409c-bf49-78775a999489"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.702953 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4f0d5ae-9746-409c-bf49-78775a999489-kube-api-access-l4rpw" (OuterVolumeSpecName: "kube-api-access-l4rpw") pod "a4f0d5ae-9746-409c-bf49-78775a999489" (UID: "a4f0d5ae-9746-409c-bf49-78775a999489"). InnerVolumeSpecName "kube-api-access-l4rpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.795647 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4rpw\" (UniqueName: \"kubernetes.io/projected/a4f0d5ae-9746-409c-bf49-78775a999489-kube-api-access-l4rpw\") on node \"crc\" DevicePath \"\"" Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.795691 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.802444 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4f0d5ae-9746-409c-bf49-78775a999489" (UID: "a4f0d5ae-9746-409c-bf49-78775a999489"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:19:43 crc kubenswrapper[4611]: I0929 13:19:43.898496 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4f0d5ae-9746-409c-bf49-78775a999489-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.074487 4611 generic.go:334] "Generic (PLEG): container finished" podID="a4f0d5ae-9746-409c-bf49-78775a999489" containerID="5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c" exitCode=0 Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.074568 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerDied","Data":"5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c"} Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.074644 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brnh8" event={"ID":"a4f0d5ae-9746-409c-bf49-78775a999489","Type":"ContainerDied","Data":"0ad0f9fb9025fcf7a1da7611c64b5a0986d798c954b6bd6e951f27e2dad2720b"} Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.074714 4611 scope.go:117] "RemoveContainer" containerID="5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.074919 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-brnh8" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.107735 4611 scope.go:117] "RemoveContainer" containerID="6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.120800 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-brnh8"] Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.150478 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-brnh8"] Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.155815 4611 scope.go:117] "RemoveContainer" containerID="f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.202323 4611 scope.go:117] "RemoveContainer" containerID="5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c" Sep 29 13:19:44 crc kubenswrapper[4611]: E0929 13:19:44.203367 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c\": container with ID starting with 5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c not found: ID does not exist" containerID="5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.203422 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c"} err="failed to get container status \"5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c\": rpc error: code = NotFound desc = could not find container \"5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c\": container with ID starting with 5d5737060c6366ca85ea16665262a32ffac8b8a47a1a8a4ebcb98ace1f554d8c not found: ID does not exist" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.204437 4611 scope.go:117] "RemoveContainer" containerID="6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8" Sep 29 13:19:44 crc kubenswrapper[4611]: E0929 13:19:44.205181 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8\": container with ID starting with 6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8 not found: ID does not exist" containerID="6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.205224 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8"} err="failed to get container status \"6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8\": rpc error: code = NotFound desc = could not find container \"6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8\": container with ID starting with 6d4a884587956f0d01cfe086dece311c9ee4c2e098d99a4912914f723239aae8 not found: ID does not exist" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.205255 4611 scope.go:117] "RemoveContainer" containerID="f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64" Sep 29 13:19:44 crc kubenswrapper[4611]: E0929 13:19:44.205558 4611 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64\": container with ID starting with f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64 not found: ID does not exist" containerID="f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.205580 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64"} err="failed to get container status \"f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64\": rpc error: code = NotFound desc = could not find container \"f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64\": container with ID starting with f8c513889a9809267c0ac33ccf940f83a8af0706fc65ff62e1258ab896741e64 not found: ID does not exist" Sep 29 13:19:44 crc kubenswrapper[4611]: I0929 13:19:44.736693 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:19:44 crc kubenswrapper[4611]: E0929 13:19:44.737280 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:19:45 crc kubenswrapper[4611]: I0929 13:19:45.754817 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" path="/var/lib/kubelet/pods/a4f0d5ae-9746-409c-bf49-78775a999489/volumes" Sep 29 13:19:57 crc kubenswrapper[4611]: I0929 13:19:57.737072 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:19:57 crc kubenswrapper[4611]: E0929 13:19:57.738013 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:20:11 crc kubenswrapper[4611]: I0929 13:20:11.737808 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:20:11 crc kubenswrapper[4611]: E0929 13:20:11.738865 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:20:24 crc kubenswrapper[4611]: I0929 13:20:24.736247 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:20:24 crc kubenswrapper[4611]: E0929 13:20:24.737134 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:20:35 crc kubenswrapper[4611]: I0929 13:20:35.740433 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:20:35 crc kubenswrapper[4611]: E0929 13:20:35.741480 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:20:47 crc kubenswrapper[4611]: I0929 13:20:47.736928 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:20:47 crc kubenswrapper[4611]: E0929 13:20:47.737999 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:20:58 crc kubenswrapper[4611]: I0929 13:20:58.736325 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:20:58 crc kubenswrapper[4611]: E0929 13:20:58.738107 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:21:11 crc kubenswrapper[4611]: I0929 13:21:11.736934 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:21:11 crc kubenswrapper[4611]: E0929 13:21:11.737654 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:21:26 crc kubenswrapper[4611]: I0929 13:21:26.736611 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726" Sep 29 13:21:26 crc kubenswrapper[4611]: E0929 13:21:26.737471 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Sep 29 13:21:26 crc kubenswrapper[4611]: I0929 13:21:26.736611 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:21:26 crc kubenswrapper[4611]: E0929 13:21:26.737471 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:21:41 crc kubenswrapper[4611]: I0929 13:21:41.737152 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:21:41 crc kubenswrapper[4611]: E0929 13:21:41.738021 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:21:53 crc kubenswrapper[4611]: I0929 13:21:53.743274 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:21:53 crc kubenswrapper[4611]: E0929 13:21:53.744141 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:22:06 crc kubenswrapper[4611]: I0929 13:22:06.736962 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:22:06 crc kubenswrapper[4611]: E0929 13:22:06.737604 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:22:18 crc kubenswrapper[4611]: I0929 13:22:18.854745 4611 generic.go:334] "Generic (PLEG): container finished" podID="678b329e-0ba8-4901-94e3-51738d9317c0" containerID="8d55342082266f720dbbce13ced879d995d5b6122db0b3b834a749cc503bd744" exitCode=0
Sep 29 13:22:18 crc kubenswrapper[4611]: I0929 13:22:18.854816 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" event={"ID":"678b329e-0ba8-4901-94e3-51738d9317c0","Type":"ContainerDied","Data":"8d55342082266f720dbbce13ced879d995d5b6122db0b3b834a749cc503bd744"}
Sep 29 13:22:19 crc kubenswrapper[4611]: I0929 13:22:19.736602 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:22:19 crc kubenswrapper[4611]: E0929 13:22:19.737174 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:22:20 crc kubenswrapper[4611]: I0929
13:22:20.335823 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.471725 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jtkh\" (UniqueName: \"kubernetes.io/projected/678b329e-0ba8-4901-94e3-51738d9317c0-kube-api-access-5jtkh\") pod \"678b329e-0ba8-4901-94e3-51738d9317c0\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.471842 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-secret-0\") pod \"678b329e-0ba8-4901-94e3-51738d9317c0\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.471872 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-inventory\") pod \"678b329e-0ba8-4901-94e3-51738d9317c0\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.472701 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-combined-ca-bundle\") pod \"678b329e-0ba8-4901-94e3-51738d9317c0\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.472727 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-ssh-key\") pod \"678b329e-0ba8-4901-94e3-51738d9317c0\" (UID: \"678b329e-0ba8-4901-94e3-51738d9317c0\") " Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.482034 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/678b329e-0ba8-4901-94e3-51738d9317c0-kube-api-access-5jtkh" (OuterVolumeSpecName: "kube-api-access-5jtkh") pod "678b329e-0ba8-4901-94e3-51738d9317c0" (UID: "678b329e-0ba8-4901-94e3-51738d9317c0"). InnerVolumeSpecName "kube-api-access-5jtkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.482848 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "678b329e-0ba8-4901-94e3-51738d9317c0" (UID: "678b329e-0ba8-4901-94e3-51738d9317c0"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.501139 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "678b329e-0ba8-4901-94e3-51738d9317c0" (UID: "678b329e-0ba8-4901-94e3-51738d9317c0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.503267 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "678b329e-0ba8-4901-94e3-51738d9317c0" (UID: "678b329e-0ba8-4901-94e3-51738d9317c0"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.515513 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-inventory" (OuterVolumeSpecName: "inventory") pod "678b329e-0ba8-4901-94e3-51738d9317c0" (UID: "678b329e-0ba8-4901-94e3-51738d9317c0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.574913 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jtkh\" (UniqueName: \"kubernetes.io/projected/678b329e-0ba8-4901-94e3-51738d9317c0-kube-api-access-5jtkh\") on node \"crc\" DevicePath \"\"" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.574952 4611 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.574965 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.574978 4611 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.574990 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/678b329e-0ba8-4901-94e3-51738d9317c0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.924144 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" event={"ID":"678b329e-0ba8-4901-94e3-51738d9317c0","Type":"ContainerDied","Data":"8a6fd3bc1aede466f7f458f0dd2d32b5bc79e2f6f4c6b77398d687b9bf4594bd"} Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.924464 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a6fd3bc1aede466f7f458f0dd2d32b5bc79e2f6f4c6b77398d687b9bf4594bd" Sep 29 13:22:20 crc kubenswrapper[4611]: I0929 13:22:20.924523 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-rhthx" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085076 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd"] Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085484 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="extract-utilities" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085504 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="extract-utilities" Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085531 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="registry-server" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085540 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="registry-server" Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085561 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="extract-content" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085569 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="extract-content" Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085589 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="678b329e-0ba8-4901-94e3-51738d9317c0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085599 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="678b329e-0ba8-4901-94e3-51738d9317c0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085641 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="extract-utilities" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085651 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="extract-utilities" Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085668 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="extract-content" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085677 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="extract-content" Sep 29 13:22:21 crc kubenswrapper[4611]: E0929 13:22:21.085691 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="registry-server" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085701 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="registry-server" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085918 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4f0d5ae-9746-409c-bf49-78775a999489" containerName="registry-server" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.085951 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7af7644-6878-4835-b449-dda583f327e1" containerName="registry-server" Sep 29 13:22:21 crc 
kubenswrapper[4611]: I0929 13:22:21.085967 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="678b329e-0ba8-4901-94e3-51738d9317c0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.086756 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.088949 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.089309 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.089436 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.089610 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.089620 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.090513 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.091302 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.110090 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd"] Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.186157 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.186533 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.186721 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.186912 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v67pc\" (UniqueName: \"kubernetes.io/projected/f9d9958c-2b92-4742-b36c-eaef389b07c5-kube-api-access-v67pc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: 
\"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.187036 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.187154 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.187281 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.187389 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.187506 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289583 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289706 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289748 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-1\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289784 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v67pc\" (UniqueName: \"kubernetes.io/projected/f9d9958c-2b92-4742-b36c-eaef389b07c5-kube-api-access-v67pc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289823 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289863 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289911 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289941 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.289986 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.291878 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.294595 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.295536 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.297965 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.298148 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.298917 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.300502 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.301348 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.327764 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v67pc\" (UniqueName: \"kubernetes.io/projected/f9d9958c-2b92-4742-b36c-eaef389b07c5-kube-api-access-v67pc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hggrd\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.403254 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd"
Sep 29 13:22:21 crc kubenswrapper[4611]: I0929 13:22:21.931520 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd"]
Sep 29 13:22:22 crc kubenswrapper[4611]: I0929 13:22:22.954325 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" event={"ID":"f9d9958c-2b92-4742-b36c-eaef389b07c5","Type":"ContainerStarted","Data":"79ca55f1176592740ab3ab775372d3f23665364f1d8b245f574776484114d4db"}
Sep 29 13:22:22 crc kubenswrapper[4611]: I0929 13:22:22.954781 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" event={"ID":"f9d9958c-2b92-4742-b36c-eaef389b07c5","Type":"ContainerStarted","Data":"c585bf8fd0d7e5d0ea6be8282fcc43a5013da2a6c6da69147d0c7331436a47f0"}
Sep 29 13:22:22 crc kubenswrapper[4611]: I0929 13:22:22.975341 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" podStartSLOduration=1.349764861 podStartE2EDuration="1.975325282s" podCreationTimestamp="2025-09-29 13:22:21 +0000 UTC" firstStartedPulling="2025-09-29 13:22:21.9428479 +0000 UTC m=+2528.834367506" lastFinishedPulling="2025-09-29 13:22:22.568408291 +0000 UTC m=+2529.459927927" observedRunningTime="2025-09-29 13:22:22.969399171 +0000 UTC m=+2529.860918777" watchObservedRunningTime="2025-09-29 13:22:22.975325282 +0000 UTC m=+2529.866844888"
Sep 29 13:22:32 crc kubenswrapper[4611]: I0929 13:22:32.736810 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:22:32 crc kubenswrapper[4611]: E0929 13:22:32.737709 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:22:45 crc kubenswrapper[4611]: I0929 13:22:45.741251 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:22:45 crc kubenswrapper[4611]: E0929 13:22:45.742111 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:23:00 crc kubenswrapper[4611]: I0929 13:23:00.737361 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:23:00 crc kubenswrapper[4611]: E0929 13:23:00.738807 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:23:13 crc kubenswrapper[4611]: I0929 13:23:13.743404 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:23:13 crc kubenswrapper[4611]: E0929 13:23:13.744150 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:23:27 crc kubenswrapper[4611]: I0929 13:23:27.736900 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:23:27 crc kubenswrapper[4611]: E0929 13:23:27.737955 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:23:41 crc kubenswrapper[4611]: I0929 13:23:41.736755 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:23:41 crc kubenswrapper[4611]: E0929 13:23:41.737646 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:23:56 crc kubenswrapper[4611]: I0929 13:23:56.737243 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:23:56 crc kubenswrapper[4611]: E0929 13:23:56.738047 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:24:11 crc kubenswrapper[4611]: I0929 13:24:11.736608 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:24:12 crc kubenswrapper[4611]: I0929 13:24:12.069637 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"bdf11d9c6f683907844a1ddba734a5a1556ae365384489865ce074aef18eaf22"}
Sep 29 13:24:36 crc kubenswrapper[4611]: I0929 13:24:36.867194 4611 scope.go:117] "RemoveContainer" containerID="8eab6350a162b81a6ebdca51b4dd15a2e8aded873a340d837985ec201105a27b"
Sep 29 13:24:36 crc kubenswrapper[4611]: I0929 13:24:36.897978 4611 scope.go:117] "RemoveContainer" containerID="d5ea64182e222b86d03bec8d65c4895a672db3994a8e501fa578a654ab91a511"
Sep 29 13:24:36 crc kubenswrapper[4611]: I0929 13:24:36.937916 4611 scope.go:117] "RemoveContainer" containerID="d2eeadbf3606b6732e9687acfe18b689856fcbc6386aa15c6217d1315d055db3"
Sep 29 13:24:55 crc kubenswrapper[4611]: I0929 13:24:55.928803 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-54fd444d4f-vmksq" podUID="cf23ea05-4538-4fed-bb3d-07d009f400bd" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.355665 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x7grs"]
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.358694 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.366251 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x7grs"]
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.549085 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vmbs\" (UniqueName: \"kubernetes.io/projected/57297240-6543-46cc-8dfd-14704f6f504d-kube-api-access-4vmbs\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.549202 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-utilities\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.549251 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-catalog-content\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.651522 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vmbs\" (UniqueName: \"kubernetes.io/projected/57297240-6543-46cc-8dfd-14704f6f504d-kube-api-access-4vmbs\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.651750 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-utilities\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.651802 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-catalog-content\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.652458 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-catalog-content\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.653329 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-utilities\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.689645 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vmbs\" (UniqueName: \"kubernetes.io/projected/57297240-6543-46cc-8dfd-14704f6f504d-kube-api-access-4vmbs\") pod \"community-operators-x7grs\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") " pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:16 crc kubenswrapper[4611]: I0929 13:25:16.725019 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:17 crc kubenswrapper[4611]: I0929 13:25:17.274595 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x7grs"]
Sep 29 13:25:17 crc kubenswrapper[4611]: W0929 13:25:17.284041 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57297240_6543_46cc_8dfd_14704f6f504d.slice/crio-cbef58ca254b95c8913da91d7ca2035a8d8fef936fa7d455898d5af56edaabf7 WatchSource:0}: Error finding container cbef58ca254b95c8913da91d7ca2035a8d8fef936fa7d455898d5af56edaabf7: Status 404 returned error can't find the container with id cbef58ca254b95c8913da91d7ca2035a8d8fef936fa7d455898d5af56edaabf7
Sep 29 13:25:17 crc kubenswrapper[4611]: I0929 13:25:17.705421 4611 generic.go:334] "Generic (PLEG): container finished" podID="57297240-6543-46cc-8dfd-14704f6f504d" containerID="9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff" exitCode=0
Sep 29 13:25:17 crc kubenswrapper[4611]: I0929 13:25:17.705518 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x7grs" event={"ID":"57297240-6543-46cc-8dfd-14704f6f504d","Type":"ContainerDied","Data":"9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff"}
Sep 29 13:25:17 crc kubenswrapper[4611]: I0929 13:25:17.705653 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x7grs" event={"ID":"57297240-6543-46cc-8dfd-14704f6f504d","Type":"ContainerStarted","Data":"cbef58ca254b95c8913da91d7ca2035a8d8fef936fa7d455898d5af56edaabf7"}
Sep 29 13:25:17 crc kubenswrapper[4611]: I0929 13:25:17.708926 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 13:25:19 crc kubenswrapper[4611]: I0929 13:25:19.725598 4611 generic.go:334] "Generic (PLEG): container finished" podID="57297240-6543-46cc-8dfd-14704f6f504d" containerID="6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4" exitCode=0
Sep 29 13:25:19 crc kubenswrapper[4611]: I0929 13:25:19.725743 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x7grs" event={"ID":"57297240-6543-46cc-8dfd-14704f6f504d","Type":"ContainerDied","Data":"6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4"}
Sep 29 13:25:20 crc kubenswrapper[4611]: I0929 13:25:20.752666 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x7grs" event={"ID":"57297240-6543-46cc-8dfd-14704f6f504d","Type":"ContainerStarted","Data":"a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029"}
Sep 29 13:25:20 crc kubenswrapper[4611]: I0929 13:25:20.788346 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x7grs" podStartSLOduration=2.253131248 podStartE2EDuration="4.788329474s" podCreationTimestamp="2025-09-29 13:25:16 +0000 UTC" firstStartedPulling="2025-09-29 13:25:17.708533047 +0000 UTC m=+2704.600052663" lastFinishedPulling="2025-09-29 13:25:20.243731283 +0000 UTC m=+2707.135250889" observedRunningTime="2025-09-29 13:25:20.786907763 +0000 UTC m=+2707.678427409" watchObservedRunningTime="2025-09-29 13:25:20.788329474 +0000 UTC m=+2707.679849080"
Sep 29 13:25:26 crc kubenswrapper[4611]: I0929 13:25:26.725871 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:26 crc kubenswrapper[4611]: I0929 13:25:26.726515 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:26 crc kubenswrapper[4611]: I0929 13:25:26.790613 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:26 crc kubenswrapper[4611]: I0929 13:25:26.871911 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:27 crc kubenswrapper[4611]: I0929 13:25:27.040692 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x7grs"]
Sep 29 13:25:28 crc kubenswrapper[4611]: I0929 13:25:28.824253 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x7grs" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="registry-server" containerID="cri-o://a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029" gracePeriod=2
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.278241 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.403517 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-utilities\") pod \"57297240-6543-46cc-8dfd-14704f6f504d\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") "
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.403839 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-catalog-content\") pod \"57297240-6543-46cc-8dfd-14704f6f504d\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") "
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.403895 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vmbs\" (UniqueName: \"kubernetes.io/projected/57297240-6543-46cc-8dfd-14704f6f504d-kube-api-access-4vmbs\") pod \"57297240-6543-46cc-8dfd-14704f6f504d\" (UID: \"57297240-6543-46cc-8dfd-14704f6f504d\") "
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.404891 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-utilities" (OuterVolumeSpecName: "utilities") pod "57297240-6543-46cc-8dfd-14704f6f504d" (UID: "57297240-6543-46cc-8dfd-14704f6f504d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.412379 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57297240-6543-46cc-8dfd-14704f6f504d-kube-api-access-4vmbs" (OuterVolumeSpecName: "kube-api-access-4vmbs") pod "57297240-6543-46cc-8dfd-14704f6f504d" (UID: "57297240-6543-46cc-8dfd-14704f6f504d"). InnerVolumeSpecName "kube-api-access-4vmbs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.461349 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57297240-6543-46cc-8dfd-14704f6f504d" (UID: "57297240-6543-46cc-8dfd-14704f6f504d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.506659 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.506698 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vmbs\" (UniqueName: \"kubernetes.io/projected/57297240-6543-46cc-8dfd-14704f6f504d-kube-api-access-4vmbs\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.506715 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57297240-6543-46cc-8dfd-14704f6f504d-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.837278 4611 generic.go:334] "Generic (PLEG): container finished" podID="57297240-6543-46cc-8dfd-14704f6f504d" containerID="a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029" exitCode=0
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.837334 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x7grs" event={"ID":"57297240-6543-46cc-8dfd-14704f6f504d","Type":"ContainerDied","Data":"a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029"}
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.837386 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x7grs"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.837415 4611 scope.go:117] "RemoveContainer" containerID="a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.837396 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x7grs" event={"ID":"57297240-6543-46cc-8dfd-14704f6f504d","Type":"ContainerDied","Data":"cbef58ca254b95c8913da91d7ca2035a8d8fef936fa7d455898d5af56edaabf7"}
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.872891 4611 scope.go:117] "RemoveContainer" containerID="6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.896788 4611 scope.go:117] "RemoveContainer" containerID="9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.897391 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x7grs"]
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.906978 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x7grs"]
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.933060 4611 scope.go:117] "RemoveContainer" containerID="a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029"
Sep 29 13:25:29 crc kubenswrapper[4611]: E0929 13:25:29.933458 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029\": container with ID starting with a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029 not found: ID does not exist" containerID="a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.933486 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029"} err="failed to get container status \"a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029\": rpc error: code = NotFound desc = could not find container \"a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029\": container with ID starting with a87e0c832e49e9a4190bee6ae127a5bef3002d817fc0a055879cd26bd3d5a029 not found: ID does not exist"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.933517 4611 scope.go:117] "RemoveContainer" containerID="6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4"
Sep 29 13:25:29 crc kubenswrapper[4611]: E0929 13:25:29.933763 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4\": container with ID starting with 6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4 not found: ID does not exist" containerID="6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.933787 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4"} err="failed to get container status \"6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4\": rpc error: code = NotFound desc = could not find container \"6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4\": container with ID starting with 6c67a2685e27cff2af81a234af1f6056e498f0559f4dff5169f5f4fbb05a27e4 not found: ID does not exist"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.933801 4611 scope.go:117] "RemoveContainer" containerID="9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff"
Sep 29 13:25:29 crc kubenswrapper[4611]: E0929 13:25:29.934077 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff\": container with ID starting with 9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff not found: ID does not exist" containerID="9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff"
Sep 29 13:25:29 crc kubenswrapper[4611]: I0929 13:25:29.934096 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff"} err="failed to get container status \"9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff\": rpc error: code = NotFound desc = could not find container \"9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff\": container with ID starting with 9c74c5f91379baa2a5d373ec623965c4d16675c249f46f8c8102ad2cd59845ff not found: ID does not exist"
Sep 29 13:25:31 crc kubenswrapper[4611]: I0929 13:25:31.750313 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57297240-6543-46cc-8dfd-14704f6f504d" path="/var/lib/kubelet/pods/57297240-6543-46cc-8dfd-14704f6f504d/volumes"
Sep 29 13:25:49 crc kubenswrapper[4611]: I0929 13:25:49.029842 4611 generic.go:334] "Generic (PLEG): container finished" podID="f9d9958c-2b92-4742-b36c-eaef389b07c5" containerID="79ca55f1176592740ab3ab775372d3f23665364f1d8b245f574776484114d4db" exitCode=0
Sep 29 13:25:49 crc kubenswrapper[4611]: I0929 13:25:49.030043 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" event={"ID":"f9d9958c-2b92-4742-b36c-eaef389b07c5","Type":"ContainerDied","Data":"79ca55f1176592740ab3ab775372d3f23665364f1d8b245f574776484114d4db"}
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.548819 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd"
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619358 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v67pc\" (UniqueName: \"kubernetes.io/projected/f9d9958c-2b92-4742-b36c-eaef389b07c5-kube-api-access-v67pc\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619469 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-inventory\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619544 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-1\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619601 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-extra-config-0\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619668 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-combined-ca-bundle\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619692 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-0\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619723 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-ssh-key\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619790 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-0\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.619834 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-1\") pod \"f9d9958c-2b92-4742-b36c-eaef389b07c5\" (UID: \"f9d9958c-2b92-4742-b36c-eaef389b07c5\") "
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.626975 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9d9958c-2b92-4742-b36c-eaef389b07c5-kube-api-access-v67pc" (OuterVolumeSpecName: "kube-api-access-v67pc") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "kube-api-access-v67pc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.638317 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.649407 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.652176 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.656126 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.657802 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.665899 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-inventory" (OuterVolumeSpecName: "inventory") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.671044 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.677127 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f9d9958c-2b92-4742-b36c-eaef389b07c5" (UID: "f9d9958c-2b92-4742-b36c-eaef389b07c5"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721733 4611 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721776 4611 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721789 4611 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721805 4611 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721819 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721832 4611 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721843 4611 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721858 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v67pc\" (UniqueName: \"kubernetes.io/projected/f9d9958c-2b92-4742-b36c-eaef389b07c5-kube-api-access-v67pc\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:50 crc kubenswrapper[4611]: I0929 13:25:50.721870 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9d9958c-2b92-4742-b36c-eaef389b07c5-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.052904 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd" event={"ID":"f9d9958c-2b92-4742-b36c-eaef389b07c5","Type":"ContainerDied","Data":"c585bf8fd0d7e5d0ea6be8282fcc43a5013da2a6c6da69147d0c7331436a47f0"}
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.053285 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c585bf8fd0d7e5d0ea6be8282fcc43a5013da2a6c6da69147d0c7331436a47f0"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.052993 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hggrd"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.182753 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"]
Sep 29 13:25:51 crc kubenswrapper[4611]: E0929 13:25:51.183223 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="registry-server"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.183243 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="registry-server"
Sep 29 13:25:51 crc kubenswrapper[4611]: E0929 13:25:51.183290 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9d9958c-2b92-4742-b36c-eaef389b07c5" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.183298 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9d9958c-2b92-4742-b36c-eaef389b07c5" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:25:51 crc kubenswrapper[4611]: E0929 13:25:51.183312 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="extract-content"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.183320 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="extract-content"
Sep 29 13:25:51 crc kubenswrapper[4611]: E0929 13:25:51.183335 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="extract-utilities"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.183343 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="extract-utilities"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.183615 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="57297240-6543-46cc-8dfd-14704f6f504d" containerName="registry-server"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.183660 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9d9958c-2b92-4742-b36c-eaef389b07c5" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.184641 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.192739 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgcxz"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.192960 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.193152 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.193319 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.193453 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.193830 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"]
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.233840 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnfpt\" (UniqueName: \"kubernetes.io/projected/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-kube-api-access-hnfpt\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.233987 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234046 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234100 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234216 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-3\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234281 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234324 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234368 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.234419 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.336284 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.336390 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337338 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337387 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-3\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337442 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337471 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337500 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337604 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.337783 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnfpt\" (UniqueName: \"kubernetes.io/projected/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-kube-api-access-hnfpt\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.341327 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.341811 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.342013 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.343112 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.343890 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.344349 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-3\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.342421 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.351242 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.359755 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnfpt\" (UniqueName: \"kubernetes.io/projected/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-kube-api-access-hnfpt\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:51 crc kubenswrapper[4611]: I0929 13:25:51.512229 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:25:52 crc kubenswrapper[4611]: I0929 13:25:52.049120 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"]
Sep 29 13:25:52 crc kubenswrapper[4611]: I0929 13:25:52.065131 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc" event={"ID":"0cacd54c-23e1-40c3-963a-33bd7c91a0ad","Type":"ContainerStarted","Data":"24169375dfa8873800956cbae230ed4c9ddbcd46dd9c03ce1d877492aa36f9a6"}
Sep 29 13:25:55 crc kubenswrapper[4611]: I0929 13:25:55.107592 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc" event={"ID":"0cacd54c-23e1-40c3-963a-33bd7c91a0ad","Type":"ContainerStarted","Data":"3e22a04754fd2164c51e38093d7eff1e52f3185867c758d7dc8b8789c98735bf"}
Sep 29 13:25:55 crc kubenswrapper[4611]: I0929 13:25:55.136657 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc" podStartSLOduration=2.468148464 podStartE2EDuration="4.136590689s" podCreationTimestamp="2025-09-29 13:25:51 +0000 UTC" firstStartedPulling="2025-09-29 13:25:52.055245216 +0000 UTC m=+2738.946764832" lastFinishedPulling="2025-09-29 13:25:53.723687411 +0000 UTC m=+2740.615207057" observedRunningTime="2025-09-29 13:25:55.128696271 +0000 UTC m=+2742.020215887" watchObservedRunningTime="2025-09-29 13:25:55.136590689 +0000 UTC m=+2742.028110325"
Sep 29 13:26:34 crc kubenswrapper[4611]: I0929 13:26:34.628458 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:26:34 crc kubenswrapper[4611]: I0929 13:26:34.629118 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:27:04 crc kubenswrapper[4611]: I0929 13:27:04.628580 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:27:04 crc kubenswrapper[4611]: I0929 13:27:04.629199 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:27:34 crc kubenswrapper[4611]: I0929 13:27:34.628529 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:27:34 crc kubenswrapper[4611]: I0929 13:27:34.629742 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:27:34 crc kubenswrapper[4611]: I0929 13:27:34.629805 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq"
Sep 29 13:27:34 crc kubenswrapper[4611]: I0929 13:27:34.630523 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bdf11d9c6f683907844a1ddba734a5a1556ae365384489865ce074aef18eaf22"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 13:27:34 crc kubenswrapper[4611]: I0929 13:27:34.630578 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://bdf11d9c6f683907844a1ddba734a5a1556ae365384489865ce074aef18eaf22" gracePeriod=600
Sep 29 13:27:35 crc kubenswrapper[4611]: I0929 13:27:35.117870 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="bdf11d9c6f683907844a1ddba734a5a1556ae365384489865ce074aef18eaf22" exitCode=0
Sep 29 13:27:35 crc kubenswrapper[4611]: I0929 13:27:35.117942 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"bdf11d9c6f683907844a1ddba734a5a1556ae365384489865ce074aef18eaf22"}
Sep 29 13:27:35 crc kubenswrapper[4611]: I0929 13:27:35.118175 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c"}
Sep 29 13:27:35 crc kubenswrapper[4611]: I0929 13:27:35.118191 4611 scope.go:117] "RemoveContainer" containerID="2bc082f99167674d4730b4d4d80593ec89261b871040a2f3aaa695d42940c726"
Sep 29 13:28:23 crc kubenswrapper[4611]: I0929 13:28:23.565112 4611 generic.go:334] "Generic (PLEG): container finished" podID="0cacd54c-23e1-40c3-963a-33bd7c91a0ad" containerID="3e22a04754fd2164c51e38093d7eff1e52f3185867c758d7dc8b8789c98735bf" exitCode=0
Sep 29 13:28:23 crc kubenswrapper[4611]: I0929 13:28:23.565188 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc" event={"ID":"0cacd54c-23e1-40c3-963a-33bd7c91a0ad","Type":"ContainerDied","Data":"3e22a04754fd2164c51e38093d7eff1e52f3185867c758d7dc8b8789c98735bf"}
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.715697 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.825019 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ssh-key\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.825671 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-2\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.825756 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-3\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.825875 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-1\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.825926 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-4\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.826027 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-0\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.826087 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnfpt\" (UniqueName: \"kubernetes.io/projected/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-kube-api-access-hnfpt\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.826145 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-telemetry-combined-ca-bundle\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.826193 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-inventory\") pod \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\" (UID: \"0cacd54c-23e1-40c3-963a-33bd7c91a0ad\") "
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.842601 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.845593 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-kube-api-access-hnfpt" (OuterVolumeSpecName: "kube-api-access-hnfpt") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "kube-api-access-hnfpt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.875463 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.878930 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.888231 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-inventory" (OuterVolumeSpecName: "inventory") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.888716 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.890117 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-3" (OuterVolumeSpecName: "ceilometer-compute-config-data-3") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "ceilometer-compute-config-data-3". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.890496 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.891696 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-4" (OuterVolumeSpecName: "ceilometer-compute-config-data-4") pod "0cacd54c-23e1-40c3-963a-33bd7c91a0ad" (UID: "0cacd54c-23e1-40c3-963a-33bd7c91a0ad"). InnerVolumeSpecName "ceilometer-compute-config-data-4". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928378 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928412 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928424 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-3\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928434 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928442 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-4\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928454 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928464 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnfpt\" (UniqueName: \"kubernetes.io/projected/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-kube-api-access-hnfpt\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928473 4611 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:25 crc kubenswrapper[4611]: I0929 13:28:25.928483 4611 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cacd54c-23e1-40c3-963a-33bd7c91a0ad-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 13:28:26 crc kubenswrapper[4611]: I0929 13:28:26.595525 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc" event={"ID":"0cacd54c-23e1-40c3-963a-33bd7c91a0ad","Type":"ContainerDied","Data":"24169375dfa8873800956cbae230ed4c9ddbcd46dd9c03ce1d877492aa36f9a6"}
Sep 29 13:28:26 crc kubenswrapper[4611]: I0929 13:28:26.595586 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24169375dfa8873800956cbae230ed4c9ddbcd46dd9c03ce1d877492aa36f9a6"
Sep 29 13:28:26 crc kubenswrapper[4611]: I0929 13:28:26.595888 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.442696 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.443564 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="900d519c-288a-4341-911f-e429cbddfd5b" containerName="openstackclient" containerID="cri-o://b523b1cefb0bc86e82651cb2485164ffd84786629efb63de528fb53a0d93b6ae" gracePeriod=2
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.455009 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.482589 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Sep 29 13:28:39 crc kubenswrapper[4611]: E0929 13:28:39.487170 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="900d519c-288a-4341-911f-e429cbddfd5b" containerName="openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.487208 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="900d519c-288a-4341-911f-e429cbddfd5b" containerName="openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: E0929 13:28:39.487255 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cacd54c-23e1-40c3-963a-33bd7c91a0ad" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.487264 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cacd54c-23e1-40c3-963a-33bd7c91a0ad" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.487475 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cacd54c-23e1-40c3-963a-33bd7c91a0ad" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.487496 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="900d519c-288a-4341-911f-e429cbddfd5b" containerName="openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.488499 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.493723 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="900d519c-288a-4341-911f-e429cbddfd5b" podUID="ee5fae2a-545b-431c-9909-eab67261b348" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.503897 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.593410 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdntq\" (UniqueName: \"kubernetes.io/projected/ee5fae2a-545b-431c-9909-eab67261b348-kube-api-access-wdntq\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.593449 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.593496 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.593570 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config-secret\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.696211 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config-secret\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.696415 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdntq\" (UniqueName: \"kubernetes.io/projected/ee5fae2a-545b-431c-9909-eab67261b348-kube-api-access-wdntq\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.696468 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.696523 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient" Sep 29 
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.697567 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.703354 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config-secret\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.725271 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.731314 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdntq\" (UniqueName: \"kubernetes.io/projected/ee5fae2a-545b-431c-9909-eab67261b348-kube-api-access-wdntq\") pod \"openstackclient\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " pod="openstack/openstackclient"
Sep 29 13:28:39 crc kubenswrapper[4611]: I0929 13:28:39.862108 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 13:28:40 crc kubenswrapper[4611]: I0929 13:28:40.529840 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 29 13:28:40 crc kubenswrapper[4611]: I0929 13:28:40.734901 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ee5fae2a-545b-431c-9909-eab67261b348","Type":"ContainerStarted","Data":"728281a8767f0de2d97d50f2f50e60920c79ce9480c19f1dd76586cae5fae269"}
Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.207672 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.211116 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.215983 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.216228 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.216376 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.216558 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.216685 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.219899 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-4c467" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.221441 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378046 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378097 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378126 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378164 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7959j\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-kube-api-access-7959j\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378279 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378310 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378390 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c89022d6-96bf-48a2-bd3a-484f1136f7f3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.378412 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.480557 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c89022d6-96bf-48a2-bd3a-484f1136f7f3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.480993 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.481016 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.481039 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.481063 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.481097 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7959j\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-kube-api-access-7959j\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc 
kubenswrapper[4611]: I0929 13:28:41.481142 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.481171 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.481265 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c89022d6-96bf-48a2-bd3a-484f1136f7f3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.487769 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.487873 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.489054 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.489573 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.490194 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.490767 4611 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
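The csi_attacher line above explains why no staging call happens for this PVC: the kubevirt.io.hostpath-provisioner node plugin does not advertise the CSI STAGE_UNSTAGE_VOLUME capability, so kubelet skips the NodeStageVolume RPC and, as the next entry shows, records MountVolume.MountDevice as a trivial success at the globalmount path before publishing the volume into the pod directory. A sketch of that decision; the map and function are illustrative, not kubelet's csi_attacher implementation:

package main

import "fmt"

// Sketch of the capability check behind "STAGE_UNSTAGE_VOLUME
// capability not set. Skipping MountDevice..." in the log above.
const capStageUnstage = "STAGE_UNSTAGE_VOLUME"

func mountDevice(nodeCaps map[string]bool, volID, globalMountPath string) {
	if !nodeCaps[capStageUnstage] {
		// Driver never stages: kubelet records MountDevice as a
		// trivial success and goes straight to NodePublishVolume.
		fmt.Println("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...")
		return
	}
	fmt.Printf("NodeStageVolume(%s) -> %s\n", volID, globalMountPath)
}

func main() {
	// Per the log, the hostpath provisioner advertises no staging
	// capability, so the empty capability set applies here.
	mountDevice(map[string]bool{},
		"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac",
		"/var/lib/kubelet/plugins/kubernetes.io/csi/.../globalmount")
}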
Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.490793 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/011e03d938f2ef8a964a7cdbb011cad12f4d4f86381f793fd7260f28b991e8a0/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.504111 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7959j\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-kube-api-access-7959j\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.565521 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.765615 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ee5fae2a-545b-431c-9909-eab67261b348","Type":"ContainerStarted","Data":"a26c31add2c2b26916a1beaab8d62db5c8f537dffacb6b1b27eb1d38e5b77f24"} Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.788120 4611 generic.go:334] "Generic (PLEG): container finished" podID="900d519c-288a-4341-911f-e429cbddfd5b" containerID="b523b1cefb0bc86e82651cb2485164ffd84786629efb63de528fb53a0d93b6ae" exitCode=137 Sep 29 13:28:41 crc kubenswrapper[4611]: I0929 13:28:41.852200 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.338793 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.507706 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config\") pod \"900d519c-288a-4341-911f-e429cbddfd5b\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.507803 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld5nl\" (UniqueName: \"kubernetes.io/projected/900d519c-288a-4341-911f-e429cbddfd5b-kube-api-access-ld5nl\") pod \"900d519c-288a-4341-911f-e429cbddfd5b\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.508237 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-combined-ca-bundle\") pod \"900d519c-288a-4341-911f-e429cbddfd5b\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.508376 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config-secret\") pod \"900d519c-288a-4341-911f-e429cbddfd5b\" (UID: \"900d519c-288a-4341-911f-e429cbddfd5b\") " Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.532780 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/900d519c-288a-4341-911f-e429cbddfd5b-kube-api-access-ld5nl" (OuterVolumeSpecName: "kube-api-access-ld5nl") pod "900d519c-288a-4341-911f-e429cbddfd5b" (UID: "900d519c-288a-4341-911f-e429cbddfd5b"). InnerVolumeSpecName "kube-api-access-ld5nl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.543509 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "900d519c-288a-4341-911f-e429cbddfd5b" (UID: "900d519c-288a-4341-911f-e429cbddfd5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.544150 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "900d519c-288a-4341-911f-e429cbddfd5b" (UID: "900d519c-288a-4341-911f-e429cbddfd5b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.574443 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "900d519c-288a-4341-911f-e429cbddfd5b" (UID: "900d519c-288a-4341-911f-e429cbddfd5b"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.612809 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.612841 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.612852 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/900d519c-288a-4341-911f-e429cbddfd5b-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.612861 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld5nl\" (UniqueName: \"kubernetes.io/projected/900d519c-288a-4341-911f-e429cbddfd5b-kube-api-access-ld5nl\") on node \"crc\" DevicePath \"\"" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.613041 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.613028074 podStartE2EDuration="3.613028074s" podCreationTimestamp="2025-09-29 13:28:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:28:41.81508326 +0000 UTC m=+2908.706602866" watchObservedRunningTime="2025-09-29 13:28:42.613028074 +0000 UTC m=+2909.504547680" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.613472 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:28:42 crc kubenswrapper[4611]: W0929 13:28:42.614196 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc89022d6_96bf_48a2_bd3a_484f1136f7f3.slice/crio-f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c WatchSource:0}: Error finding container f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c: Status 404 returned error can't find the container with id f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.800360 4611 scope.go:117] "RemoveContainer" containerID="b523b1cefb0bc86e82651cb2485164ffd84786629efb63de528fb53a0d93b6ae" Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.800363 4611 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.613472 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:28:42 crc kubenswrapper[4611]: W0929 13:28:42.614196 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc89022d6_96bf_48a2_bd3a_484f1136f7f3.slice/crio-f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c WatchSource:0}: Error finding container f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c: Status 404 returned error can't find the container with id f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c
Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.800360 4611 scope.go:117] "RemoveContainer" containerID="b523b1cefb0bc86e82651cb2485164ffd84786629efb63de528fb53a0d93b6ae"
Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.800363 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 13:28:42 crc kubenswrapper[4611]: I0929 13:28:42.802712 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerStarted","Data":"f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c"}
Sep 29 13:28:43 crc kubenswrapper[4611]: I0929 13:28:43.753518 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="900d519c-288a-4341-911f-e429cbddfd5b" path="/var/lib/kubelet/pods/900d519c-288a-4341-911f-e429cbddfd5b/volumes"
Sep 29 13:28:49 crc kubenswrapper[4611]: I0929 13:28:49.869769 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerStarted","Data":"3da7b25ec86f70283fe0f91b10b2ef09bd15f1dae8ead9060bd5ba355c84f980"}
Sep 29 13:28:57 crc kubenswrapper[4611]: I0929 13:28:57.947036 4611 generic.go:334] "Generic (PLEG): container finished" podID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerID="3da7b25ec86f70283fe0f91b10b2ef09bd15f1dae8ead9060bd5ba355c84f980" exitCode=0
Sep 29 13:28:57 crc kubenswrapper[4611]: I0929 13:28:57.947780 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerDied","Data":"3da7b25ec86f70283fe0f91b10b2ef09bd15f1dae8ead9060bd5ba355c84f980"}
Sep 29 13:29:05 crc kubenswrapper[4611]: I0929 13:29:05.021968 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerStarted","Data":"363a5c72cdd3e70d68063f521b0d69d30d262754bbc5e8e9509573d3836e11e0"}
Sep 29 13:29:08 crc kubenswrapper[4611]: I0929 13:29:08.053399 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerStarted","Data":"1c6ec99c32d2e13ae28991e04a8003e6ff40e6725866784ca3919848ffb502b4"}
Sep 29 13:29:11 crc kubenswrapper[4611]: I0929 13:29:11.086275 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerStarted","Data":"6ab1b4e7fe9ae8e2b4284f919cc2687302be81a90c2343d4849d6f17a8a87f3b"}
Sep 29 13:29:11 crc kubenswrapper[4611]: I0929 13:29:11.120140 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.177326254 podStartE2EDuration="31.120121905s" podCreationTimestamp="2025-09-29 13:28:40 +0000 UTC" firstStartedPulling="2025-09-29 13:28:42.616893895 +0000 UTC m=+2909.508413501" lastFinishedPulling="2025-09-29 13:29:10.559689546 +0000 UTC m=+2937.451209152" observedRunningTime="2025-09-29 13:29:11.118690474 +0000 UTC m=+2938.010210090" watchObservedRunningTime="2025-09-29 13:29:11.120121905 +0000 UTC m=+2938.011641511"
Sep 29 13:29:11 crc kubenswrapper[4611]: I0929 13:29:11.853602 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:11 crc kubenswrapper[4611]: I0929 13:29:11.853773 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:11 crc kubenswrapper[4611]: I0929 13:29:11.856779 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup"
status="started" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:12 crc kubenswrapper[4611]: I0929 13:29:12.097365 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:13 crc kubenswrapper[4611]: I0929 13:29:13.940462 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:13 crc kubenswrapper[4611]: I0929 13:29:13.940996 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="ee5fae2a-545b-431c-9909-eab67261b348" containerName="openstackclient" containerID="cri-o://a26c31add2c2b26916a1beaab8d62db5c8f537dffacb6b1b27eb1d38e5b77f24" gracePeriod=2 Sep 29 13:29:13 crc kubenswrapper[4611]: I0929 13:29:13.977789 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.006344 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: E0929 13:29:14.006798 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee5fae2a-545b-431c-9909-eab67261b348" containerName="openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.006815 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee5fae2a-545b-431c-9909-eab67261b348" containerName="openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.007002 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee5fae2a-545b-431c-9909-eab67261b348" containerName="openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.007699 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.032529 4611 status_manager.go:875] "Failed to update status for pod" pod="openstack/openstackclient" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e9004b-2835-43e5-a45b-5de541eb325f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:29:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:29:14Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:29:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.rdoproject.org/podified-master-centos10/openstack-openstackclient:current-tested\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"openstackclient\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/clouds.yaml\\\",\\\"name\\\":\\\"openstack-config\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/secure.yaml\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/cloudrc\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem\\\",\\\"name\\\":\\\"combined-ca-bundle\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tkwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:29:14Z\\\"}}\" for pod \"openstack\"/\"openstackclient\": pods \"openstackclient\" not found" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.076772 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.112217 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: E0929 13:29:14.114527 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-2tkwj openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="65e9004b-2835-43e5-a45b-5de541eb325f" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.138724 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.157390 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.157588 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.157645 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tkwj\" (UniqueName: \"kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.157681 4611 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config-secret\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.157801 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.159780 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.173416 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="ee5fae2a-545b-431c-9909-eab67261b348" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.175964 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.209819 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="65e9004b-2835-43e5-a45b-5de541eb325f" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.259200 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f5bf612f-9341-4ddc-8525-55976ff9bedc-openstack-config-secret\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.259274 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cxp2\" (UniqueName: \"kubernetes.io/projected/f5bf612f-9341-4ddc-8525-55976ff9bedc-kube-api-access-9cxp2\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.259313 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.259350 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tkwj\" (UniqueName: \"kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.259380 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config-secret\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.259892 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bf612f-9341-4ddc-8525-55976ff9bedc-combined-ca-bundle\") pod \"openstackclient\" 
(UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.260311 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.260396 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f5bf612f-9341-4ddc-8525-55976ff9bedc-openstack-config\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.261157 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: E0929 13:29:14.261231 4611 projected.go:194] Error preparing data for projected volume kube-api-access-2tkwj for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (65e9004b-2835-43e5-a45b-5de541eb325f) does not match the UID in record. The object might have been deleted and then recreated Sep 29 13:29:14 crc kubenswrapper[4611]: E0929 13:29:14.261274 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj podName:65e9004b-2835-43e5-a45b-5de541eb325f nodeName:}" failed. No retries permitted until 2025-09-29 13:29:14.761259636 +0000 UTC m=+2941.652779242 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-2tkwj" (UniqueName: "kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj") pod "openstackclient" (UID: "65e9004b-2835-43e5-a45b-5de541eb325f") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (65e9004b-2835-43e5-a45b-5de541eb325f) does not match the UID in record. 
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.265698 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config-secret\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.266741 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.368400 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bf612f-9341-4ddc-8525-55976ff9bedc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.368563 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f5bf612f-9341-4ddc-8525-55976ff9bedc-openstack-config\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.368647 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f5bf612f-9341-4ddc-8525-55976ff9bedc-openstack-config-secret\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.368768 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cxp2\" (UniqueName: \"kubernetes.io/projected/f5bf612f-9341-4ddc-8525-55976ff9bedc-kube-api-access-9cxp2\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.370251 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f5bf612f-9341-4ddc-8525-55976ff9bedc-openstack-config\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.372438 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5bf612f-9341-4ddc-8525-55976ff9bedc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.372915 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f5bf612f-9341-4ddc-8525-55976ff9bedc-openstack-config-secret\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient"
Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.385516 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cxp2\" (UniqueName:
\"kubernetes.io/projected/f5bf612f-9341-4ddc-8525-55976ff9bedc-kube-api-access-9cxp2\") pod \"openstackclient\" (UID: \"f5bf612f-9341-4ddc-8525-55976ff9bedc\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.497568 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: I0929 13:29:14.775217 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tkwj\" (UniqueName: \"kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj\") pod \"openstackclient\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " pod="openstack/openstackclient" Sep 29 13:29:14 crc kubenswrapper[4611]: E0929 13:29:14.778506 4611 projected.go:194] Error preparing data for projected volume kube-api-access-2tkwj for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (65e9004b-2835-43e5-a45b-5de541eb325f) does not match the UID in record. The object might have been deleted and then recreated Sep 29 13:29:14 crc kubenswrapper[4611]: E0929 13:29:14.778588 4611 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj podName:65e9004b-2835-43e5-a45b-5de541eb325f nodeName:}" failed. No retries permitted until 2025-09-29 13:29:15.778560348 +0000 UTC m=+2942.670079954 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-2tkwj" (UniqueName: "kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj") pod "openstackclient" (UID: "65e9004b-2835-43e5-a45b-5de541eb325f") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (65e9004b-2835-43e5-a45b-5de541eb325f) does not match the UID in record. The object might have been deleted and then recreated Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.004370 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.133564 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.133808 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="thanos-sidecar" containerID="cri-o://6ab1b4e7fe9ae8e2b4284f919cc2687302be81a90c2343d4849d6f17a8a87f3b" gracePeriod=600 Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.133859 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="config-reloader" containerID="cri-o://1c6ec99c32d2e13ae28991e04a8003e6ff40e6725866784ca3919848ffb502b4" gracePeriod=600 Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.133961 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="prometheus" containerID="cri-o://363a5c72cdd3e70d68063f521b0d69d30d262754bbc5e8e9509573d3836e11e0" gracePeriod=600 Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.143571 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="65e9004b-2835-43e5-a45b-5de541eb325f" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.147167 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.151472 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="65e9004b-2835-43e5-a45b-5de541eb325f" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.272113 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.290875 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-combined-ca-bundle\") pod \"65e9004b-2835-43e5-a45b-5de541eb325f\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.291011 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config\") pod \"65e9004b-2835-43e5-a45b-5de541eb325f\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.291065 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config-secret\") pod \"65e9004b-2835-43e5-a45b-5de541eb325f\" (UID: \"65e9004b-2835-43e5-a45b-5de541eb325f\") " Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.291508 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "65e9004b-2835-43e5-a45b-5de541eb325f" (UID: "65e9004b-2835-43e5-a45b-5de541eb325f"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.292316 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tkwj\" (UniqueName: \"kubernetes.io/projected/65e9004b-2835-43e5-a45b-5de541eb325f-kube-api-access-2tkwj\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.292336 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.296922 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65e9004b-2835-43e5-a45b-5de541eb325f" (UID: "65e9004b-2835-43e5-a45b-5de541eb325f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.301193 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "65e9004b-2835-43e5-a45b-5de541eb325f" (UID: "65e9004b-2835-43e5-a45b-5de541eb325f"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:15 crc kubenswrapper[4611]: W0929 13:29:15.312827 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5bf612f_9341_4ddc_8525_55976ff9bedc.slice/crio-ccd5c71d3e37d7265ee25ae2890baffd77c1d0187440e032eec4ef1129082f8f WatchSource:0}: Error finding container ccd5c71d3e37d7265ee25ae2890baffd77c1d0187440e032eec4ef1129082f8f: Status 404 returned error can't find the container with id ccd5c71d3e37d7265ee25ae2890baffd77c1d0187440e032eec4ef1129082f8f Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.393816 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.393844 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65e9004b-2835-43e5-a45b-5de541eb325f-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:15 crc kubenswrapper[4611]: I0929 13:29:15.747986 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65e9004b-2835-43e5-a45b-5de541eb325f" path="/var/lib/kubelet/pods/65e9004b-2835-43e5-a45b-5de541eb325f/volumes" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.149348 4611 generic.go:334] "Generic (PLEG): container finished" podID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerID="6ab1b4e7fe9ae8e2b4284f919cc2687302be81a90c2343d4849d6f17a8a87f3b" exitCode=0 Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.150854 4611 generic.go:334] "Generic (PLEG): container finished" podID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerID="1c6ec99c32d2e13ae28991e04a8003e6ff40e6725866784ca3919848ffb502b4" exitCode=0 Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.150967 4611 generic.go:334] "Generic (PLEG): container finished" podID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" 
containerID="363a5c72cdd3e70d68063f521b0d69d30d262754bbc5e8e9509573d3836e11e0" exitCode=0 Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.149425 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerDied","Data":"6ab1b4e7fe9ae8e2b4284f919cc2687302be81a90c2343d4849d6f17a8a87f3b"} Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.151170 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerDied","Data":"1c6ec99c32d2e13ae28991e04a8003e6ff40e6725866784ca3919848ffb502b4"} Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.151188 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerDied","Data":"363a5c72cdd3e70d68063f521b0d69d30d262754bbc5e8e9509573d3836e11e0"} Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.153857 4611 generic.go:334] "Generic (PLEG): container finished" podID="ee5fae2a-545b-431c-9909-eab67261b348" containerID="a26c31add2c2b26916a1beaab8d62db5c8f537dffacb6b1b27eb1d38e5b77f24" exitCode=137 Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.155883 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.155914 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f5bf612f-9341-4ddc-8525-55976ff9bedc","Type":"ContainerStarted","Data":"ac46fae0506f6df96591ef69b7d1abe19f4d8a98295e5d2c1aff69d21aa4c35f"} Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.155966 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f5bf612f-9341-4ddc-8525-55976ff9bedc","Type":"ContainerStarted","Data":"ccd5c71d3e37d7265ee25ae2890baffd77c1d0187440e032eec4ef1129082f8f"} Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.159281 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="65e9004b-2835-43e5-a45b-5de541eb325f" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.178057 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="65e9004b-2835-43e5-a45b-5de541eb325f" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.180562 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.180542171 podStartE2EDuration="2.180542171s" podCreationTimestamp="2025-09-29 13:29:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:29:16.174709662 +0000 UTC m=+2943.066229288" watchObservedRunningTime="2025-09-29 13:29:16.180542171 +0000 UTC m=+2943.072061777" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.239892 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.338030 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config-secret\") pod \"ee5fae2a-545b-431c-9909-eab67261b348\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.338100 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-combined-ca-bundle\") pod \"ee5fae2a-545b-431c-9909-eab67261b348\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.338133 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdntq\" (UniqueName: \"kubernetes.io/projected/ee5fae2a-545b-431c-9909-eab67261b348-kube-api-access-wdntq\") pod \"ee5fae2a-545b-431c-9909-eab67261b348\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.338305 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config\") pod \"ee5fae2a-545b-431c-9909-eab67261b348\" (UID: \"ee5fae2a-545b-431c-9909-eab67261b348\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.347056 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee5fae2a-545b-431c-9909-eab67261b348-kube-api-access-wdntq" (OuterVolumeSpecName: "kube-api-access-wdntq") pod "ee5fae2a-545b-431c-9909-eab67261b348" (UID: "ee5fae2a-545b-431c-9909-eab67261b348"). InnerVolumeSpecName "kube-api-access-wdntq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.405266 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee5fae2a-545b-431c-9909-eab67261b348" (UID: "ee5fae2a-545b-431c-9909-eab67261b348"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.406033 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "ee5fae2a-545b-431c-9909-eab67261b348" (UID: "ee5fae2a-545b-431c-9909-eab67261b348"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.412688 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "ee5fae2a-545b-431c-9909-eab67261b348" (UID: "ee5fae2a-545b-431c-9909-eab67261b348"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.441211 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.441238 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdntq\" (UniqueName: \"kubernetes.io/projected/ee5fae2a-545b-431c-9909-eab67261b348-kube-api-access-wdntq\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.441249 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.441259 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee5fae2a-545b-431c-9909-eab67261b348-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.834000 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968050 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7959j\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-kube-api-access-7959j\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968100 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-thanos-prometheus-http-client-file\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968143 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-tls-assets\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968279 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968301 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config-out\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968334 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c89022d6-96bf-48a2-bd3a-484f1136f7f3-prometheus-metric-storage-rulefiles-0\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 
13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.968384 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-web-config\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.969661 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\" (UID: \"c89022d6-96bf-48a2-bd3a-484f1136f7f3\") " Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.970001 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c89022d6-96bf-48a2-bd3a-484f1136f7f3-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.970430 4611 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c89022d6-96bf-48a2-bd3a-484f1136f7f3-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.973998 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config-out" (OuterVolumeSpecName: "config-out") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.974002 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config" (OuterVolumeSpecName: "config") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.974537 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.974814 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "thanos-prometheus-http-client-file". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.975673 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-kube-api-access-7959j" (OuterVolumeSpecName: "kube-api-access-7959j") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "kube-api-access-7959j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:29:16 crc kubenswrapper[4611]: I0929 13:29:16.997689 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-web-config" (OuterVolumeSpecName: "web-config") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.020023 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "c89022d6-96bf-48a2-bd3a-484f1136f7f3" (UID: "c89022d6-96bf-48a2-bd3a-484f1136f7f3"). InnerVolumeSpecName "pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072735 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072782 4611 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c89022d6-96bf-48a2-bd3a-484f1136f7f3-config-out\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072791 4611 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-web-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072833 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") on node \"crc\" " Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072846 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7959j\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-kube-api-access-7959j\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072858 4611 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c89022d6-96bf-48a2-bd3a-484f1136f7f3-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.072867 4611 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c89022d6-96bf-48a2-bd3a-484f1136f7f3-tls-assets\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.121315 4611 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping UnmountDevice... Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.121464 4611 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac") on node "crc" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.175577 4611 reconciler_common.go:293] "Volume detached for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") on node \"crc\" DevicePath \"\"" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.189291 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c89022d6-96bf-48a2-bd3a-484f1136f7f3","Type":"ContainerDied","Data":"f1f19245d5842bc1c8462bd528ee1c7aa30fdddf0d090cfb5756612861cb286c"} Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.189351 4611 scope.go:117] "RemoveContainer" containerID="6ab1b4e7fe9ae8e2b4284f919cc2687302be81a90c2343d4849d6f17a8a87f3b" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.189529 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.198384 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.216086 4611 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="ee5fae2a-545b-431c-9909-eab67261b348" podUID="f5bf612f-9341-4ddc-8525-55976ff9bedc" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.241295 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.241998 4611 scope.go:117] "RemoveContainer" containerID="1c6ec99c32d2e13ae28991e04a8003e6ff40e6725866784ca3919848ffb502b4" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.253251 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.281651 4611 scope.go:117] "RemoveContainer" containerID="363a5c72cdd3e70d68063f521b0d69d30d262754bbc5e8e9509573d3836e11e0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.319176 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:29:17 crc kubenswrapper[4611]: E0929 13:29:17.320077 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="config-reloader" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320101 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="config-reloader" Sep 29 13:29:17 crc kubenswrapper[4611]: E0929 13:29:17.320128 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="init-config-reloader" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320137 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="init-config-reloader" Sep 29 13:29:17 crc kubenswrapper[4611]: E0929 13:29:17.320165 4611 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="prometheus" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320176 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="prometheus" Sep 29 13:29:17 crc kubenswrapper[4611]: E0929 13:29:17.320190 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="thanos-sidecar" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320198 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="thanos-sidecar" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320798 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="config-reloader" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320857 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="thanos-sidecar" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.320897 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" containerName="prometheus" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.349538 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.349717 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.371179 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.371386 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.371727 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.372370 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-4c467" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.376752 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.377006 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.383222 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.392969 4611 scope.go:117] "RemoveContainer" containerID="3da7b25ec86f70283fe0f91b10b2ef09bd15f1dae8ead9060bd5ba355c84f980" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.425453 4611 scope.go:117] "RemoveContainer" containerID="a26c31add2c2b26916a1beaab8d62db5c8f537dffacb6b1b27eb1d38e5b77f24" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488570 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-secret-combined-ca-bundle\") pod 
\"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488644 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-config\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488676 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f03d874-6a2e-46d9-9771-30724984f113-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488709 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488728 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6f03d874-6a2e-46d9-9771-30724984f113-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488745 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488792 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488880 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488923 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-tls-assets\") pod 
\"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488956 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.488988 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5pw7\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-kube-api-access-l5pw7\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590578 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-config\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590669 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f03d874-6a2e-46d9-9771-30724984f113-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590722 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590750 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6f03d874-6a2e-46d9-9771-30724984f113-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590780 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590853 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590952 4611 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.590994 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.591021 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.591049 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5pw7\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-kube-api-access-l5pw7\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.591100 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.597747 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.598424 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.598436 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6f03d874-6a2e-46d9-9771-30724984f113-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.598961 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: 
\"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.599618 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-config\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.599768 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.600553 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.600842 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f03d874-6a2e-46d9-9771-30724984f113-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.604347 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.614222 4611 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.614276 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/011e03d938f2ef8a964a7cdbb011cad12f4d4f86381f793fd7260f28b991e8a0/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.625496 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5pw7\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-kube-api-access-l5pw7\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.657261 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.705546 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.754526 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c89022d6-96bf-48a2-bd3a-484f1136f7f3" path="/var/lib/kubelet/pods/c89022d6-96bf-48a2-bd3a-484f1136f7f3/volumes"
Sep 29 13:29:17 crc kubenswrapper[4611]: I0929 13:29:17.755301 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee5fae2a-545b-431c-9909-eab67261b348" path="/var/lib/kubelet/pods/ee5fae2a-545b-431c-9909-eab67261b348/volumes"
Sep 29 13:29:18 crc kubenswrapper[4611]: W0929 13:29:18.240865 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f03d874_6a2e_46d9_9771_30724984f113.slice/crio-4ce848ad895f4980cdb1ad25f7000a6eab1d51191a110de5bdd0ecab17e0843f WatchSource:0}: Error finding container 4ce848ad895f4980cdb1ad25f7000a6eab1d51191a110de5bdd0ecab17e0843f: Status 404 returned error can't find the container with id 4ce848ad895f4980cdb1ad25f7000a6eab1d51191a110de5bdd0ecab17e0843f
Sep 29 13:29:18 crc kubenswrapper[4611]: I0929 13:29:18.243904 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:29:18 crc kubenswrapper[4611]: I0929 13:29:18.463488 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:29:19 crc kubenswrapper[4611]: I0929 13:29:19.215784 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6f03d874-6a2e-46d9-9771-30724984f113","Type":"ContainerStarted","Data":"4ce848ad895f4980cdb1ad25f7000a6eab1d51191a110de5bdd0ecab17e0843f"}
Sep 29 13:29:22 crc kubenswrapper[4611]: I0929 13:29:22.245076 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6f03d874-6a2e-46d9-9771-30724984f113","Type":"ContainerStarted","Data":"f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06"}
Sep 29 13:29:22 crc kubenswrapper[4611]: I0929 13:29:22.245221 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="6f03d874-6a2e-46d9-9771-30724984f113" containerName="init-config-reloader" containerID="cri-o://f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06" gracePeriod=600
Sep 29 13:29:28 crc kubenswrapper[4611]: I0929 13:29:28.974221 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.018574 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-secret-combined-ca-bundle\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.018838 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.019731 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-config\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.019871 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-thanos-prometheus-http-client-file\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020024 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020164 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020291 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f03d874-6a2e-46d9-9771-30724984f113-config-out\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020482 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020590 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5pw7\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-kube-api-access-l5pw7\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020709 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6f03d874-6a2e-46d9-9771-30724984f113-prometheus-metric-storage-rulefiles-0\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.020848 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-tls-assets\") pod \"6f03d874-6a2e-46d9-9771-30724984f113\" (UID: \"6f03d874-6a2e-46d9-9771-30724984f113\") "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.023233 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f03d874-6a2e-46d9-9771-30724984f113-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.024658 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.024988 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.029011 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-kube-api-access-l5pw7" (OuterVolumeSpecName: "kube-api-access-l5pw7") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "kube-api-access-l5pw7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.029966 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-config" (OuterVolumeSpecName: "config") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.030028 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.030067 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f03d874-6a2e-46d9-9771-30724984f113-config-out" (OuterVolumeSpecName: "config-out") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.031934 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.032858 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.041660 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.050838 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config" (OuterVolumeSpecName: "web-config") pod "6f03d874-6a2e-46d9-9771-30724984f113" (UID: "6f03d874-6a2e-46d9-9771-30724984f113"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123181 4611 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123240 4611 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123256 4611 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6f03d874-6a2e-46d9-9771-30724984f113-config-out\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123314 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") on node \"crc\" "
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123331 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5pw7\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-kube-api-access-l5pw7\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123346 4611 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6f03d874-6a2e-46d9-9771-30724984f113-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123360 4611 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6f03d874-6a2e-46d9-9771-30724984f113-tls-assets\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123372 4611 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123384 4611 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-web-config\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123395 4611 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-config\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.123406 4611 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6f03d874-6a2e-46d9-9771-30724984f113-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.151313 4611 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.151762 4611 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac") on node "crc"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.225032 4611 reconciler_common.go:293] "Volume detached for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") on node \"crc\" DevicePath \"\""
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.314592 4611 generic.go:334] "Generic (PLEG): container finished" podID="6f03d874-6a2e-46d9-9771-30724984f113" containerID="f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06" exitCode=0
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.314670 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6f03d874-6a2e-46d9-9771-30724984f113","Type":"ContainerDied","Data":"f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06"}
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.314717 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6f03d874-6a2e-46d9-9771-30724984f113","Type":"ContainerDied","Data":"4ce848ad895f4980cdb1ad25f7000a6eab1d51191a110de5bdd0ecab17e0843f"}
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.314737 4611 scope.go:117] "RemoveContainer" containerID="f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.315015 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.342406 4611 scope.go:117] "RemoveContainer" containerID="f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06"
Sep 29 13:29:29 crc kubenswrapper[4611]: E0929 13:29:29.343007 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06\": container with ID starting with f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06 not found: ID does not exist" containerID="f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.343069 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06"} err="failed to get container status \"f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06\": rpc error: code = NotFound desc = could not find container \"f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06\": container with ID starting with f89dd8ba0414dba36a3fed9787acfad2f0f3044e8bcc512a89f86d802b77cb06 not found: ID does not exist"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.386333 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.397721 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.426398 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:29:29 crc kubenswrapper[4611]: E0929 13:29:29.429360 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f03d874-6a2e-46d9-9771-30724984f113" containerName="init-config-reloader"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.429388 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f03d874-6a2e-46d9-9771-30724984f113" containerName="init-config-reloader"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.429667 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f03d874-6a2e-46d9-9771-30724984f113" containerName="init-config-reloader"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.431977 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.438230 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.438245 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.444477 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-4c467"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.444546 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.444555 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.446824 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.450472 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.464567 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.544555 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.544718 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.544884 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.544917 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-config\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.544943 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.545000 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpdnd\" (UniqueName: \"kubernetes.io/projected/f68a498f-2ca9-4462-a6dd-e77c69312c95-kube-api-access-fpdnd\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.545134 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f68a498f-2ca9-4462-a6dd-e77c69312c95-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.545185 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.545242 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f68a498f-2ca9-4462-a6dd-e77c69312c95-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.545300 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f68a498f-2ca9-4462-a6dd-e77c69312c95-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.545337 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660262 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660325 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-config\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " 
pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660357 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660397 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpdnd\" (UniqueName: \"kubernetes.io/projected/f68a498f-2ca9-4462-a6dd-e77c69312c95-kube-api-access-fpdnd\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660465 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f68a498f-2ca9-4462-a6dd-e77c69312c95-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660498 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660530 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f68a498f-2ca9-4462-a6dd-e77c69312c95-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660563 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f68a498f-2ca9-4462-a6dd-e77c69312c95-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660593 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660680 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.660924 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: 
\"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.676642 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f68a498f-2ca9-4462-a6dd-e77c69312c95-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.678158 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.680049 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f68a498f-2ca9-4462-a6dd-e77c69312c95-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.680409 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.680497 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.680981 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.681009 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-config\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.684276 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f68a498f-2ca9-4462-a6dd-e77c69312c95-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc 
kubenswrapper[4611]: I0929 13:29:29.696462 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f68a498f-2ca9-4462-a6dd-e77c69312c95-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.721461 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpdnd\" (UniqueName: \"kubernetes.io/projected/f68a498f-2ca9-4462-a6dd-e77c69312c95-kube-api-access-fpdnd\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.792604 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f03d874-6a2e-46d9-9771-30724984f113" path="/var/lib/kubelet/pods/6f03d874-6a2e-46d9-9771-30724984f113/volumes" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.800197 4611 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.800240 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/011e03d938f2ef8a964a7cdbb011cad12f4d4f86381f793fd7260f28b991e8a0/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:29 crc kubenswrapper[4611]: I0929 13:29:29.922429 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b0c45dcf-d033-4e02-b34a-8840c9e220ac\") pod \"prometheus-metric-storage-0\" (UID: \"f68a498f-2ca9-4462-a6dd-e77c69312c95\") " pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:30 crc kubenswrapper[4611]: I0929 13:29:30.055366 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 13:29:30 crc kubenswrapper[4611]: I0929 13:29:30.505514 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 13:29:31 crc kubenswrapper[4611]: I0929 13:29:31.337579 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f68a498f-2ca9-4462-a6dd-e77c69312c95","Type":"ContainerStarted","Data":"638ba14e2b622850e7cbaa44a9251038d021b5f4c7e7620a1ab7a46d4cf0ead9"} Sep 29 13:29:34 crc kubenswrapper[4611]: I0929 13:29:34.363146 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f68a498f-2ca9-4462-a6dd-e77c69312c95","Type":"ContainerStarted","Data":"e8b6682df20c686ace3a5f8fa28abf0450895725776ca6c9b100d2353abf1b76"} Sep 29 13:29:34 crc kubenswrapper[4611]: I0929 13:29:34.629053 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:29:34 crc kubenswrapper[4611]: I0929 13:29:34.629125 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:29:43 crc kubenswrapper[4611]: I0929 13:29:43.492342 4611 generic.go:334] "Generic (PLEG): container finished" podID="f68a498f-2ca9-4462-a6dd-e77c69312c95" containerID="e8b6682df20c686ace3a5f8fa28abf0450895725776ca6c9b100d2353abf1b76" exitCode=0 Sep 29 13:29:43 crc kubenswrapper[4611]: I0929 13:29:43.492457 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f68a498f-2ca9-4462-a6dd-e77c69312c95","Type":"ContainerDied","Data":"e8b6682df20c686ace3a5f8fa28abf0450895725776ca6c9b100d2353abf1b76"} Sep 29 13:29:44 crc kubenswrapper[4611]: I0929 13:29:44.514517 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f68a498f-2ca9-4462-a6dd-e77c69312c95","Type":"ContainerStarted","Data":"48b02d94d910050b152cb420970438697a76881713369aee1f0cc0b2c5abc673"} Sep 29 13:29:47 crc kubenswrapper[4611]: I0929 13:29:47.545015 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f68a498f-2ca9-4462-a6dd-e77c69312c95","Type":"ContainerStarted","Data":"8e85b58f6809d9596a97c5587dbd238872234117a8144d5bc872255c2dbd54ee"} Sep 29 13:29:47 crc kubenswrapper[4611]: I0929 13:29:47.545541 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f68a498f-2ca9-4462-a6dd-e77c69312c95","Type":"ContainerStarted","Data":"aaca53b8a877972c82cd335ec3ef1e7e13822235ff3073ebdab7aa259bcf1d57"} Sep 29 13:29:47 crc kubenswrapper[4611]: I0929 13:29:47.589891 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=18.58987089 podStartE2EDuration="18.58987089s" podCreationTimestamp="2025-09-29 13:29:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 
13:29:47.587318306 +0000 UTC m=+2974.478837912" watchObservedRunningTime="2025-09-29 13:29:47.58987089 +0000 UTC m=+2974.481390506" Sep 29 13:29:50 crc kubenswrapper[4611]: I0929 13:29:50.056287 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.056393 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.070074 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.193950 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl"] Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.195308 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.198294 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.204296 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.206710 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl"] Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.350954 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2hnl\" (UniqueName: \"kubernetes.io/projected/d52f2b21-2059-4956-a0cb-1ddcab3822d8-kube-api-access-w2hnl\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.351021 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d52f2b21-2059-4956-a0cb-1ddcab3822d8-config-volume\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.351124 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d52f2b21-2059-4956-a0cb-1ddcab3822d8-secret-volume\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.452441 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2hnl\" (UniqueName: \"kubernetes.io/projected/d52f2b21-2059-4956-a0cb-1ddcab3822d8-kube-api-access-w2hnl\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.453052 4611 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d52f2b21-2059-4956-a0cb-1ddcab3822d8-config-volume\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.453212 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d52f2b21-2059-4956-a0cb-1ddcab3822d8-secret-volume\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.454565 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d52f2b21-2059-4956-a0cb-1ddcab3822d8-config-volume\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.463297 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d52f2b21-2059-4956-a0cb-1ddcab3822d8-secret-volume\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.469737 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2hnl\" (UniqueName: \"kubernetes.io/projected/d52f2b21-2059-4956-a0cb-1ddcab3822d8-kube-api-access-w2hnl\") pod \"collect-profiles-29319210-897tl\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.518272 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:00 crc kubenswrapper[4611]: I0929 13:30:00.677386 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 29 13:30:01 crc kubenswrapper[4611]: I0929 13:30:01.039528 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl"] Sep 29 13:30:01 crc kubenswrapper[4611]: I0929 13:30:01.681762 4611 generic.go:334] "Generic (PLEG): container finished" podID="d52f2b21-2059-4956-a0cb-1ddcab3822d8" containerID="d5a25dd64f0756bf15f8d00b7dabcc3a0679300c3398c4740622fd3a0c26ee5e" exitCode=0 Sep 29 13:30:01 crc kubenswrapper[4611]: I0929 13:30:01.681858 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" event={"ID":"d52f2b21-2059-4956-a0cb-1ddcab3822d8","Type":"ContainerDied","Data":"d5a25dd64f0756bf15f8d00b7dabcc3a0679300c3398c4740622fd3a0c26ee5e"} Sep 29 13:30:01 crc kubenswrapper[4611]: I0929 13:30:01.683198 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" event={"ID":"d52f2b21-2059-4956-a0cb-1ddcab3822d8","Type":"ContainerStarted","Data":"57ee730f865fdc6daa89467f7194d38a59746abbc3e527389ea3c9c93530292f"} Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.119163 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-st7l4"] Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.121438 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.131058 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-st7l4"] Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.289175 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8xlt\" (UniqueName: \"kubernetes.io/projected/5fdecdcf-d829-44a9-9632-0c47d35cb04f-kube-api-access-v8xlt\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.289668 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-catalog-content\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.289860 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-utilities\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.391872 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-catalog-content\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " 
pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.392030 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-utilities\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.392079 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8xlt\" (UniqueName: \"kubernetes.io/projected/5fdecdcf-d829-44a9-9632-0c47d35cb04f-kube-api-access-v8xlt\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.393047 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-catalog-content\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.393171 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-utilities\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.422355 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8xlt\" (UniqueName: \"kubernetes.io/projected/5fdecdcf-d829-44a9-9632-0c47d35cb04f-kube-api-access-v8xlt\") pod \"redhat-operators-st7l4\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") " pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.436190 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:02 crc kubenswrapper[4611]: I0929 13:30:02.994855 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-st7l4"] Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.178214 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.317229 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d52f2b21-2059-4956-a0cb-1ddcab3822d8-config-volume\") pod \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.317323 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2hnl\" (UniqueName: \"kubernetes.io/projected/d52f2b21-2059-4956-a0cb-1ddcab3822d8-kube-api-access-w2hnl\") pod \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.317471 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d52f2b21-2059-4956-a0cb-1ddcab3822d8-secret-volume\") pod \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\" (UID: \"d52f2b21-2059-4956-a0cb-1ddcab3822d8\") " Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.318813 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d52f2b21-2059-4956-a0cb-1ddcab3822d8-config-volume" (OuterVolumeSpecName: "config-volume") pod "d52f2b21-2059-4956-a0cb-1ddcab3822d8" (UID: "d52f2b21-2059-4956-a0cb-1ddcab3822d8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.327082 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52f2b21-2059-4956-a0cb-1ddcab3822d8-kube-api-access-w2hnl" (OuterVolumeSpecName: "kube-api-access-w2hnl") pod "d52f2b21-2059-4956-a0cb-1ddcab3822d8" (UID: "d52f2b21-2059-4956-a0cb-1ddcab3822d8"). InnerVolumeSpecName "kube-api-access-w2hnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.331788 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52f2b21-2059-4956-a0cb-1ddcab3822d8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d52f2b21-2059-4956-a0cb-1ddcab3822d8" (UID: "d52f2b21-2059-4956-a0cb-1ddcab3822d8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.419924 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2hnl\" (UniqueName: \"kubernetes.io/projected/d52f2b21-2059-4956-a0cb-1ddcab3822d8-kube-api-access-w2hnl\") on node \"crc\" DevicePath \"\"" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.419958 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d52f2b21-2059-4956-a0cb-1ddcab3822d8-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.419968 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d52f2b21-2059-4956-a0cb-1ddcab3822d8-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.704293 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" event={"ID":"d52f2b21-2059-4956-a0cb-1ddcab3822d8","Type":"ContainerDied","Data":"57ee730f865fdc6daa89467f7194d38a59746abbc3e527389ea3c9c93530292f"} Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.704331 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57ee730f865fdc6daa89467f7194d38a59746abbc3e527389ea3c9c93530292f" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.704382 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl" Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.707527 4611 generic.go:334] "Generic (PLEG): container finished" podID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerID="c3a7a750342dd761b9e7ded05766349223a661afc800e0beffb652d24e0607ac" exitCode=0 Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.707570 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerDied","Data":"c3a7a750342dd761b9e7ded05766349223a661afc800e0beffb652d24e0607ac"} Sep 29 13:30:03 crc kubenswrapper[4611]: I0929 13:30:03.707596 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerStarted","Data":"f2b5aa5a1102d2c42fd39b1f297836c255c00111581956862c3e4a760371c8e7"} Sep 29 13:30:04 crc kubenswrapper[4611]: I0929 13:30:04.277586 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2"] Sep 29 13:30:04 crc kubenswrapper[4611]: I0929 13:30:04.286313 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319165-9r7j2"] Sep 29 13:30:04 crc kubenswrapper[4611]: I0929 13:30:04.628260 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:30:04 crc kubenswrapper[4611]: I0929 13:30:04.628533 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:30:04 crc kubenswrapper[4611]: I0929 13:30:04.748937 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerStarted","Data":"31e00aac422c6e998d6fc71162972b363c360c16fb14e37f8125e453d3526ac4"} Sep 29 13:30:05 crc kubenswrapper[4611]: I0929 13:30:05.749709 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e409f2-7bd4-450d-8e97-c0020dc091c4" path="/var/lib/kubelet/pods/e7e409f2-7bd4-450d-8e97-c0020dc091c4/volumes" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.339898 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-index-hw62p"] Sep 29 13:30:09 crc kubenswrapper[4611]: E0929 13:30:09.340654 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52f2b21-2059-4956-a0cb-1ddcab3822d8" containerName="collect-profiles" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.340665 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52f2b21-2059-4956-a0cb-1ddcab3822d8" containerName="collect-profiles" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.340902 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52f2b21-2059-4956-a0cb-1ddcab3822d8" containerName="collect-profiles" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.341534 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.356324 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-index-dockercfg-fd5jm" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.368008 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-hw62p"] Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.469967 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fxj8\" (UniqueName: \"kubernetes.io/projected/267f8e93-87ba-483d-a4e3-a5c2cf82b772-kube-api-access-7fxj8\") pod \"watcher-operator-index-hw62p\" (UID: \"267f8e93-87ba-483d-a4e3-a5c2cf82b772\") " pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.571795 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fxj8\" (UniqueName: \"kubernetes.io/projected/267f8e93-87ba-483d-a4e3-a5c2cf82b772-kube-api-access-7fxj8\") pod \"watcher-operator-index-hw62p\" (UID: \"267f8e93-87ba-483d-a4e3-a5c2cf82b772\") " pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.595783 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fxj8\" (UniqueName: \"kubernetes.io/projected/267f8e93-87ba-483d-a4e3-a5c2cf82b772-kube-api-access-7fxj8\") pod \"watcher-operator-index-hw62p\" (UID: \"267f8e93-87ba-483d-a4e3-a5c2cf82b772\") " pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:09 crc kubenswrapper[4611]: I0929 13:30:09.660454 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:10 crc kubenswrapper[4611]: I0929 13:30:10.258616 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-hw62p"] Sep 29 13:30:10 crc kubenswrapper[4611]: W0929 13:30:10.280313 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod267f8e93_87ba_483d_a4e3_a5c2cf82b772.slice/crio-bc1a938e881f648cfb31b9e71ec2b0c801d8fa9acc8f4d2781aaa640d671e3c5 WatchSource:0}: Error finding container bc1a938e881f648cfb31b9e71ec2b0c801d8fa9acc8f4d2781aaa640d671e3c5: Status 404 returned error can't find the container with id bc1a938e881f648cfb31b9e71ec2b0c801d8fa9acc8f4d2781aaa640d671e3c5 Sep 29 13:30:10 crc kubenswrapper[4611]: I0929 13:30:10.828001 4611 generic.go:334] "Generic (PLEG): container finished" podID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerID="31e00aac422c6e998d6fc71162972b363c360c16fb14e37f8125e453d3526ac4" exitCode=0 Sep 29 13:30:10 crc kubenswrapper[4611]: I0929 13:30:10.828077 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerDied","Data":"31e00aac422c6e998d6fc71162972b363c360c16fb14e37f8125e453d3526ac4"} Sep 29 13:30:10 crc kubenswrapper[4611]: I0929 13:30:10.831671 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-hw62p" event={"ID":"267f8e93-87ba-483d-a4e3-a5c2cf82b772","Type":"ContainerStarted","Data":"bc1a938e881f648cfb31b9e71ec2b0c801d8fa9acc8f4d2781aaa640d671e3c5"} Sep 29 13:30:11 crc kubenswrapper[4611]: I0929 13:30:11.843253 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerStarted","Data":"f9ea27458ba186acb8854d21d39163af0f4f71962a3759051f59b5ea26e425f0"} Sep 29 13:30:11 crc kubenswrapper[4611]: I0929 13:30:11.849004 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-hw62p" event={"ID":"267f8e93-87ba-483d-a4e3-a5c2cf82b772","Type":"ContainerStarted","Data":"5a102616ad8f9587771deaf9127c7f29da459c73eb851630fe50dd17e8823630"} Sep 29 13:30:11 crc kubenswrapper[4611]: I0929 13:30:11.867672 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-st7l4" podStartSLOduration=2.145262793 podStartE2EDuration="9.86764828s" podCreationTimestamp="2025-09-29 13:30:02 +0000 UTC" firstStartedPulling="2025-09-29 13:30:03.710375073 +0000 UTC m=+2990.601894679" lastFinishedPulling="2025-09-29 13:30:11.43276056 +0000 UTC m=+2998.324280166" observedRunningTime="2025-09-29 13:30:11.865419955 +0000 UTC m=+2998.756939581" watchObservedRunningTime="2025-09-29 13:30:11.86764828 +0000 UTC m=+2998.759167886" Sep 29 13:30:11 crc kubenswrapper[4611]: I0929 13:30:11.891209 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-index-hw62p" podStartSLOduration=2.29370325 podStartE2EDuration="2.89118424s" podCreationTimestamp="2025-09-29 13:30:09 +0000 UTC" firstStartedPulling="2025-09-29 13:30:10.283084239 +0000 UTC m=+2997.174603835" lastFinishedPulling="2025-09-29 13:30:10.880565219 +0000 UTC m=+2997.772084825" observedRunningTime="2025-09-29 13:30:11.882699985 +0000 UTC m=+2998.774219591" watchObservedRunningTime="2025-09-29 
13:30:11.89118424 +0000 UTC m=+2998.782703846" Sep 29 13:30:12 crc kubenswrapper[4611]: I0929 13:30:12.439873 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:12 crc kubenswrapper[4611]: I0929 13:30:12.440350 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-st7l4" Sep 29 13:30:13 crc kubenswrapper[4611]: I0929 13:30:13.493525 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-st7l4" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server" probeResult="failure" output=< Sep 29 13:30:13 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:30:13 crc kubenswrapper[4611]: > Sep 29 13:30:19 crc kubenswrapper[4611]: I0929 13:30:19.660690 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:19 crc kubenswrapper[4611]: I0929 13:30:19.661202 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:19 crc kubenswrapper[4611]: I0929 13:30:19.697205 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:19 crc kubenswrapper[4611]: I0929 13:30:19.955822 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-index-hw62p" Sep 29 13:30:23 crc kubenswrapper[4611]: I0929 13:30:23.481198 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-st7l4" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server" probeResult="failure" output=< Sep 29 13:30:23 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:30:23 crc kubenswrapper[4611]: > Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.144398 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-svccx"] Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.147185 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.167298 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-svccx"] Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.337289 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-catalog-content\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.337335 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-utilities\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.337407 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z84p8\" (UniqueName: \"kubernetes.io/projected/df86d436-4914-470d-bacd-a47a773dbc1a-kube-api-access-z84p8\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.438717 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-catalog-content\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.438772 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-utilities\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.438838 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z84p8\" (UniqueName: \"kubernetes.io/projected/df86d436-4914-470d-bacd-a47a773dbc1a-kube-api-access-z84p8\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.439229 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-catalog-content\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.439593 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-utilities\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.468698 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z84p8\" (UniqueName: \"kubernetes.io/projected/df86d436-4914-470d-bacd-a47a773dbc1a-kube-api-access-z84p8\") pod \"certified-operators-svccx\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") " pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.492308 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-svccx" Sep 29 13:30:26 crc kubenswrapper[4611]: I0929 13:30:26.992671 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-svccx"] Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.082982 4611 generic.go:334] "Generic (PLEG): container finished" podID="df86d436-4914-470d-bacd-a47a773dbc1a" containerID="1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8" exitCode=0 Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.083206 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerDied","Data":"1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8"} Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.083233 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerStarted","Data":"b663d2b8715da457dbdaa791236bcd7c289d2dd75a4302e37292f898a21d53dc"} Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.085868 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.255678 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"] Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.257853 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.262163 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-9xwgq"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.281858 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"]
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.319295 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-bundle\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.319525 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-util\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.319688 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-625tc\" (UniqueName: \"kubernetes.io/projected/f1038ae1-2330-481e-915d-bf84f8cdbd07-kube-api-access-625tc\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.422034 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-bundle\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.422094 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-util\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.422205 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-625tc\" (UniqueName: \"kubernetes.io/projected/f1038ae1-2330-481e-915d-bf84f8cdbd07-kube-api-access-625tc\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.422686 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-bundle\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.422893 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-util\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.448502 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-625tc\" (UniqueName: \"kubernetes.io/projected/f1038ae1-2330-481e-915d-bf84f8cdbd07-kube-api-access-625tc\") pod \"11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") " pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:28 crc kubenswrapper[4611]: I0929 13:30:28.573544 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:29 crc kubenswrapper[4611]: I0929 13:30:29.093377 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerStarted","Data":"88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac"}
Sep 29 13:30:29 crc kubenswrapper[4611]: I0929 13:30:29.172715 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"]
Sep 29 13:30:29 crc kubenswrapper[4611]: W0929 13:30:29.179730 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1038ae1_2330_481e_915d_bf84f8cdbd07.slice/crio-f2878a55629432c6bcfa953acda83df6ad97324bb7b73f6e179352837b9fb97c WatchSource:0}: Error finding container f2878a55629432c6bcfa953acda83df6ad97324bb7b73f6e179352837b9fb97c: Status 404 returned error can't find the container with id f2878a55629432c6bcfa953acda83df6ad97324bb7b73f6e179352837b9fb97c
Sep 29 13:30:30 crc kubenswrapper[4611]: I0929 13:30:30.109354 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk" event={"ID":"f1038ae1-2330-481e-915d-bf84f8cdbd07","Type":"ContainerStarted","Data":"f267bd840cc254a907dca7c58155bd881cbee6f5f098db786c531b4120cc8902"}
Sep 29 13:30:30 crc kubenswrapper[4611]: I0929 13:30:30.114277 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk" event={"ID":"f1038ae1-2330-481e-915d-bf84f8cdbd07","Type":"ContainerStarted","Data":"f2878a55629432c6bcfa953acda83df6ad97324bb7b73f6e179352837b9fb97c"}
Sep 29 13:30:31 crc kubenswrapper[4611]: I0929 13:30:31.117716 4611 generic.go:334] "Generic (PLEG): container finished" podID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerID="f267bd840cc254a907dca7c58155bd881cbee6f5f098db786c531b4120cc8902" exitCode=0
Sep 29 13:30:31 crc kubenswrapper[4611]: I0929 13:30:31.117805 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk" event={"ID":"f1038ae1-2330-481e-915d-bf84f8cdbd07","Type":"ContainerDied","Data":"f267bd840cc254a907dca7c58155bd881cbee6f5f098db786c531b4120cc8902"}
Sep 29 13:30:31 crc kubenswrapper[4611]: I0929 13:30:31.119705 4611 generic.go:334] "Generic (PLEG): container finished" podID="df86d436-4914-470d-bacd-a47a773dbc1a" containerID="88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac" exitCode=0
Sep 29 13:30:31 crc kubenswrapper[4611]: I0929 13:30:31.119742 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerDied","Data":"88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac"}
Sep 29 13:30:32 crc kubenswrapper[4611]: I0929 13:30:32.132095 4611 generic.go:334] "Generic (PLEG): container finished" podID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerID="16e0ebdb8f17d6bbc51ce05cbb2b6baf0222906bffc01d088cfcff92db76dd1c" exitCode=0
Sep 29 13:30:32 crc kubenswrapper[4611]: I0929 13:30:32.132151 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk" event={"ID":"f1038ae1-2330-481e-915d-bf84f8cdbd07","Type":"ContainerDied","Data":"16e0ebdb8f17d6bbc51ce05cbb2b6baf0222906bffc01d088cfcff92db76dd1c"}
Sep 29 13:30:32 crc kubenswrapper[4611]: I0929 13:30:32.138726 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerStarted","Data":"99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9"}
Sep 29 13:30:32 crc kubenswrapper[4611]: I0929 13:30:32.198531 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-svccx" podStartSLOduration=2.7449434569999998 podStartE2EDuration="6.198510219s" podCreationTimestamp="2025-09-29 13:30:26 +0000 UTC" firstStartedPulling="2025-09-29 13:30:28.085634711 +0000 UTC m=+3014.977154317" lastFinishedPulling="2025-09-29 13:30:31.539201473 +0000 UTC m=+3018.430721079" observedRunningTime="2025-09-29 13:30:32.177169293 +0000 UTC m=+3019.068688919" watchObservedRunningTime="2025-09-29 13:30:32.198510219 +0000 UTC m=+3019.090029835"
Sep 29 13:30:33 crc kubenswrapper[4611]: I0929 13:30:33.150813 4611 generic.go:334] "Generic (PLEG): container finished" podID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerID="fb2be7dd67396138875c5743fbffa8b412d7d412754a51a73f2d07ebc379f553" exitCode=0
Sep 29 13:30:33 crc kubenswrapper[4611]: I0929 13:30:33.150977 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk" event={"ID":"f1038ae1-2330-481e-915d-bf84f8cdbd07","Type":"ContainerDied","Data":"fb2be7dd67396138875c5743fbffa8b412d7d412754a51a73f2d07ebc379f553"}
Sep 29 13:30:33 crc kubenswrapper[4611]: I0929 13:30:33.494031 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-st7l4" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server" probeResult="failure" output=<
Sep 29 13:30:33 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s
Sep 29 13:30:33 crc kubenswrapper[4611]: >
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.151829 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2gvrs"]
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.157708 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.187124 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gvrs"]
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.307297 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-utilities\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.307500 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc75m\" (UniqueName: \"kubernetes.io/projected/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-kube-api-access-cc75m\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.307642 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-catalog-content\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.411304 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-utilities\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.411415 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc75m\" (UniqueName: \"kubernetes.io/projected/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-kube-api-access-cc75m\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.412029 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-utilities\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.412085 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-catalog-content\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.412339 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-catalog-content\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.443594 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc75m\" (UniqueName: \"kubernetes.io/projected/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-kube-api-access-cc75m\") pod \"redhat-marketplace-2gvrs\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") " pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.488138 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.628496 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.628550 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.628599 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.629300 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.629354 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" gracePeriod=600
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.686158 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:34 crc kubenswrapper[4611]: E0929 13:30:34.781606 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.819675 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-bundle\") pod \"f1038ae1-2330-481e-915d-bf84f8cdbd07\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") "
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.819748 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-util\") pod \"f1038ae1-2330-481e-915d-bf84f8cdbd07\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") "
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.819907 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-625tc\" (UniqueName: \"kubernetes.io/projected/f1038ae1-2330-481e-915d-bf84f8cdbd07-kube-api-access-625tc\") pod \"f1038ae1-2330-481e-915d-bf84f8cdbd07\" (UID: \"f1038ae1-2330-481e-915d-bf84f8cdbd07\") "
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.821027 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-bundle" (OuterVolumeSpecName: "bundle") pod "f1038ae1-2330-481e-915d-bf84f8cdbd07" (UID: "f1038ae1-2330-481e-915d-bf84f8cdbd07"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.827033 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1038ae1-2330-481e-915d-bf84f8cdbd07-kube-api-access-625tc" (OuterVolumeSpecName: "kube-api-access-625tc") pod "f1038ae1-2330-481e-915d-bf84f8cdbd07" (UID: "f1038ae1-2330-481e-915d-bf84f8cdbd07"). InnerVolumeSpecName "kube-api-access-625tc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.829067 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-util" (OuterVolumeSpecName: "util") pod "f1038ae1-2330-481e-915d-bf84f8cdbd07" (UID: "f1038ae1-2330-481e-915d-bf84f8cdbd07"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.923376 4611 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.923421 4611 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f1038ae1-2330-481e-915d-bf84f8cdbd07-util\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:34 crc kubenswrapper[4611]: I0929 13:30:34.923436 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-625tc\" (UniqueName: \"kubernetes.io/projected/f1038ae1-2330-481e-915d-bf84f8cdbd07-kube-api-access-625tc\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.079140 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gvrs"]
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.204724 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk" event={"ID":"f1038ae1-2330-481e-915d-bf84f8cdbd07","Type":"ContainerDied","Data":"f2878a55629432c6bcfa953acda83df6ad97324bb7b73f6e179352837b9fb97c"}
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.205018 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2878a55629432c6bcfa953acda83df6ad97324bb7b73f6e179352837b9fb97c"
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.204865 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk"
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.223595 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" exitCode=0
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.223687 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c"}
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.223720 4611 scope.go:117] "RemoveContainer" containerID="bdf11d9c6f683907844a1ddba734a5a1556ae365384489865ce074aef18eaf22"
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.224305 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c"
Sep 29 13:30:35 crc kubenswrapper[4611]: E0929 13:30:35.224527 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:30:35 crc kubenswrapper[4611]: I0929 13:30:35.237873 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerStarted","Data":"c3e6f7a7ce8899f7b3ed5806bf53e781a060bc259fe2d2b07645d3291a6faed6"}
Sep 29 13:30:36 crc kubenswrapper[4611]: I0929 13:30:36.249382 4611 generic.go:334] "Generic (PLEG): container finished" podID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerID="3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e" exitCode=0
Sep 29 13:30:36 crc kubenswrapper[4611]: I0929 13:30:36.249735 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerDied","Data":"3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e"}
Sep 29 13:30:36 crc kubenswrapper[4611]: I0929 13:30:36.494335 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-svccx"
Sep 29 13:30:36 crc kubenswrapper[4611]: I0929 13:30:36.494372 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-svccx"
Sep 29 13:30:36 crc kubenswrapper[4611]: I0929 13:30:36.541692 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-svccx"
Sep 29 13:30:37 crc kubenswrapper[4611]: I0929 13:30:37.179229 4611 scope.go:117] "RemoveContainer" containerID="0e0972f99474d164cd71916c89cf314d197b5221b6eb95e7f28211e396b85a50"
Sep 29 13:30:37 crc kubenswrapper[4611]: I0929 13:30:37.270006 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerStarted","Data":"501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84"}
Sep 29 13:30:37 crc kubenswrapper[4611]: I0929 13:30:37.326126 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-svccx"
Sep 29 13:30:38 crc kubenswrapper[4611]: I0929 13:30:38.280309 4611 generic.go:334] "Generic (PLEG): container finished" podID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerID="501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84" exitCode=0
Sep 29 13:30:38 crc kubenswrapper[4611]: I0929 13:30:38.280380 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerDied","Data":"501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84"}
Sep 29 13:30:38 crc kubenswrapper[4611]: I0929 13:30:38.360742 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-svccx"]
Sep 29 13:30:39 crc kubenswrapper[4611]: I0929 13:30:39.290937 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-svccx" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="registry-server" containerID="cri-o://99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9" gracePeriod=2
Sep 29 13:30:39 crc kubenswrapper[4611]: I0929 13:30:39.291294 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerStarted","Data":"403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f"}
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.225402 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-svccx"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.243510 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2gvrs" podStartSLOduration=3.817032176 podStartE2EDuration="6.2434936s" podCreationTimestamp="2025-09-29 13:30:34 +0000 UTC" firstStartedPulling="2025-09-29 13:30:36.251842066 +0000 UTC m=+3023.143361682" lastFinishedPulling="2025-09-29 13:30:38.6783035 +0000 UTC m=+3025.569823106" observedRunningTime="2025-09-29 13:30:39.384008417 +0000 UTC m=+3026.275528023" watchObservedRunningTime="2025-09-29 13:30:40.2434936 +0000 UTC m=+3027.135013206"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.303572 4611 generic.go:334] "Generic (PLEG): container finished" podID="df86d436-4914-470d-bacd-a47a773dbc1a" containerID="99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9" exitCode=0
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.304558 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-svccx"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.305040 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerDied","Data":"99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9"}
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.305068 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-svccx" event={"ID":"df86d436-4914-470d-bacd-a47a773dbc1a","Type":"ContainerDied","Data":"b663d2b8715da457dbdaa791236bcd7c289d2dd75a4302e37292f898a21d53dc"}
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.305086 4611 scope.go:117] "RemoveContainer" containerID="99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.338140 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z84p8\" (UniqueName: \"kubernetes.io/projected/df86d436-4914-470d-bacd-a47a773dbc1a-kube-api-access-z84p8\") pod \"df86d436-4914-470d-bacd-a47a773dbc1a\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") "
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.338298 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-utilities\") pod \"df86d436-4914-470d-bacd-a47a773dbc1a\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") "
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.338336 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-catalog-content\") pod \"df86d436-4914-470d-bacd-a47a773dbc1a\" (UID: \"df86d436-4914-470d-bacd-a47a773dbc1a\") "
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.339177 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-utilities" (OuterVolumeSpecName: "utilities") pod "df86d436-4914-470d-bacd-a47a773dbc1a" (UID: "df86d436-4914-470d-bacd-a47a773dbc1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.341067 4611 scope.go:117] "RemoveContainer" containerID="88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.344283 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df86d436-4914-470d-bacd-a47a773dbc1a-kube-api-access-z84p8" (OuterVolumeSpecName: "kube-api-access-z84p8") pod "df86d436-4914-470d-bacd-a47a773dbc1a" (UID: "df86d436-4914-470d-bacd-a47a773dbc1a"). InnerVolumeSpecName "kube-api-access-z84p8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.409469 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df86d436-4914-470d-bacd-a47a773dbc1a" (UID: "df86d436-4914-470d-bacd-a47a773dbc1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.413006 4611 scope.go:117] "RemoveContainer" containerID="1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.441155 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z84p8\" (UniqueName: \"kubernetes.io/projected/df86d436-4914-470d-bacd-a47a773dbc1a-kube-api-access-z84p8\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.441191 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.441203 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df86d436-4914-470d-bacd-a47a773dbc1a-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.499612 4611 scope.go:117] "RemoveContainer" containerID="99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9"
Sep 29 13:30:40 crc kubenswrapper[4611]: E0929 13:30:40.500101 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9\": container with ID starting with 99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9 not found: ID does not exist" containerID="99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.500140 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9"} err="failed to get container status \"99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9\": rpc error: code = NotFound desc = could not find container \"99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9\": container with ID starting with 99c94f00f6ea15e37d7089803885b842959e08b8ebb2b51ebe22f2f3488043c9 not found: ID does not exist"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.500164 4611 scope.go:117] "RemoveContainer" containerID="88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac"
Sep 29 13:30:40 crc kubenswrapper[4611]: E0929 13:30:40.500368 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac\": container with ID starting with 88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac not found: ID does not exist" containerID="88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.500424 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac"} err="failed to get container status \"88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac\": rpc error: code = NotFound desc = could not find container \"88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac\": container with ID starting with 88edb8c7191d6556149c70e5260b6699b2775f14cb2958cada8175c639da21ac not found: ID does not exist"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.500443 4611 scope.go:117] "RemoveContainer" containerID="1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8"
Sep 29 13:30:40 crc kubenswrapper[4611]: E0929 13:30:40.500641 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8\": container with ID starting with 1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8 not found: ID does not exist" containerID="1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.500668 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8"} err="failed to get container status \"1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8\": rpc error: code = NotFound desc = could not find container \"1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8\": container with ID starting with 1568afccaa0a242afff354aed68b36e6ca83dee35e4990deb68c18f0889c51e8 not found: ID does not exist"
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.638319 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-svccx"]
Sep 29 13:30:40 crc kubenswrapper[4611]: I0929 13:30:40.645981 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-svccx"]
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.754024 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" path="/var/lib/kubelet/pods/df86d436-4914-470d-bacd-a47a773dbc1a/volumes"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.937487 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"]
Sep 29 13:30:41 crc kubenswrapper[4611]: E0929 13:30:41.937912 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="extract"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.937926 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="extract"
Sep 29 13:30:41 crc kubenswrapper[4611]: E0929 13:30:41.937936 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="registry-server"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.937941 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="registry-server"
Sep 29 13:30:41 crc kubenswrapper[4611]: E0929 13:30:41.937962 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="pull"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.937967 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="pull"
Sep 29 13:30:41 crc kubenswrapper[4611]: E0929 13:30:41.937984 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="extract-content"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.937989 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="extract-content"
Sep 29 13:30:41 crc kubenswrapper[4611]: E0929 13:30:41.938003 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="util"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.938008 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="util"
Sep 29 13:30:41 crc kubenswrapper[4611]: E0929 13:30:41.938034 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="extract-utilities"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.938041 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="extract-utilities"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.938197 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1038ae1-2330-481e-915d-bf84f8cdbd07" containerName="extract"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.938230 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="df86d436-4914-470d-bacd-a47a773dbc1a" containerName="registry-server"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.939449 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.948536 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-5dbxl"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.948905 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-service-cert"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.953802 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"]
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.968437 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g44r7\" (UniqueName: \"kubernetes.io/projected/babeead3-8fa0-433e-aa7b-bed713216821-kube-api-access-g44r7\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.968641 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-webhook-cert\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:41 crc kubenswrapper[4611]: I0929 13:30:41.968695 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-apiservice-cert\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.070528 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g44r7\" (UniqueName: \"kubernetes.io/projected/babeead3-8fa0-433e-aa7b-bed713216821-kube-api-access-g44r7\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.070741 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-webhook-cert\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.070780 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-apiservice-cert\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.076746 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-apiservice-cert\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.078026 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-webhook-cert\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.092063 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g44r7\" (UniqueName: \"kubernetes.io/projected/babeead3-8fa0-433e-aa7b-bed713216821-kube-api-access-g44r7\") pod \"watcher-operator-controller-manager-6598b66547-cl44m\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.261999 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.620861 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-st7l4"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.711050 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-st7l4"
Sep 29 13:30:42 crc kubenswrapper[4611]: I0929 13:30:42.971612 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"]
Sep 29 13:30:43 crc kubenswrapper[4611]: I0929 13:30:43.333231 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" event={"ID":"babeead3-8fa0-433e-aa7b-bed713216821","Type":"ContainerStarted","Data":"ab9721ff59996715756c6dc9b7bf788a973b8ea4f155fb4493ed56bb59b7c886"}
Sep 29 13:30:44 crc kubenswrapper[4611]: I0929 13:30:44.489805 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:44 crc kubenswrapper[4611]: I0929 13:30:44.490129 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:44 crc kubenswrapper[4611]: I0929 13:30:44.575907 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:45 crc kubenswrapper[4611]: I0929 13:30:45.353541 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" event={"ID":"babeead3-8fa0-433e-aa7b-bed713216821","Type":"ContainerStarted","Data":"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333"}
Sep 29 13:30:45 crc kubenswrapper[4611]: I0929 13:30:45.354174 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:45 crc kubenswrapper[4611]: I0929 13:30:45.354216 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" event={"ID":"babeead3-8fa0-433e-aa7b-bed713216821","Type":"ContainerStarted","Data":"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee"}
Sep 29 13:30:45 crc kubenswrapper[4611]: I0929 13:30:45.435750 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" podStartSLOduration=2.907201295 podStartE2EDuration="4.435734455s" podCreationTimestamp="2025-09-29 13:30:41 +0000 UTC" firstStartedPulling="2025-09-29 13:30:43.012153905 +0000 UTC m=+3029.903673511" lastFinishedPulling="2025-09-29 13:30:44.540687065 +0000 UTC m=+3031.432206671" observedRunningTime="2025-09-29 13:30:45.434029226 +0000 UTC m=+3032.325548832" watchObservedRunningTime="2025-09-29 13:30:45.435734455 +0000 UTC m=+3032.327254061"
Sep 29 13:30:45 crc kubenswrapper[4611]: I0929 13:30:45.447255 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:48 crc kubenswrapper[4611]: I0929 13:30:48.736433 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c"
Sep 29 13:30:48 crc kubenswrapper[4611]: E0929 13:30:48.737213 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:30:49 crc kubenswrapper[4611]: I0929 13:30:49.936326 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-st7l4"]
Sep 29 13:30:49 crc kubenswrapper[4611]: I0929 13:30:49.936556 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-st7l4" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server" containerID="cri-o://f9ea27458ba186acb8854d21d39163af0f4f71962a3759051f59b5ea26e425f0" gracePeriod=2
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.407000 4611 generic.go:334] "Generic (PLEG): container finished" podID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerID="f9ea27458ba186acb8854d21d39163af0f4f71962a3759051f59b5ea26e425f0" exitCode=0
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.407457 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerDied","Data":"f9ea27458ba186acb8854d21d39163af0f4f71962a3759051f59b5ea26e425f0"}
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.407489 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-st7l4" event={"ID":"5fdecdcf-d829-44a9-9632-0c47d35cb04f","Type":"ContainerDied","Data":"f2b5aa5a1102d2c42fd39b1f297836c255c00111581956862c3e4a760371c8e7"}
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.407505 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2b5aa5a1102d2c42fd39b1f297836c255c00111581956862c3e4a760371c8e7"
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.482755 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-st7l4"
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.579269 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-catalog-content\") pod \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") "
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.579704 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8xlt\" (UniqueName: \"kubernetes.io/projected/5fdecdcf-d829-44a9-9632-0c47d35cb04f-kube-api-access-v8xlt\") pod \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") "
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.579765 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-utilities\") pod \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\" (UID: \"5fdecdcf-d829-44a9-9632-0c47d35cb04f\") "
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.581144 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-utilities" (OuterVolumeSpecName: "utilities") pod "5fdecdcf-d829-44a9-9632-0c47d35cb04f" (UID: "5fdecdcf-d829-44a9-9632-0c47d35cb04f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.598807 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fdecdcf-d829-44a9-9632-0c47d35cb04f-kube-api-access-v8xlt" (OuterVolumeSpecName: "kube-api-access-v8xlt") pod "5fdecdcf-d829-44a9-9632-0c47d35cb04f" (UID: "5fdecdcf-d829-44a9-9632-0c47d35cb04f"). InnerVolumeSpecName "kube-api-access-v8xlt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.658898 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5fdecdcf-d829-44a9-9632-0c47d35cb04f" (UID: "5fdecdcf-d829-44a9-9632-0c47d35cb04f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.682293 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.682331 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdecdcf-d829-44a9-9632-0c47d35cb04f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:50 crc kubenswrapper[4611]: I0929 13:30:50.682346 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8xlt\" (UniqueName: \"kubernetes.io/projected/5fdecdcf-d829-44a9-9632-0c47d35cb04f-kube-api-access-v8xlt\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:51 crc kubenswrapper[4611]: I0929 13:30:51.417160 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-st7l4"
Sep 29 13:30:51 crc kubenswrapper[4611]: I0929 13:30:51.477092 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-st7l4"]
Sep 29 13:30:51 crc kubenswrapper[4611]: I0929 13:30:51.489999 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-st7l4"]
Sep 29 13:30:51 crc kubenswrapper[4611]: I0929 13:30:51.750572 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" path="/var/lib/kubelet/pods/5fdecdcf-d829-44a9-9632-0c47d35cb04f/volumes"
Sep 29 13:30:52 crc kubenswrapper[4611]: I0929 13:30:52.267651 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"
Sep 29 13:30:52 crc kubenswrapper[4611]: I0929 13:30:52.535926 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gvrs"]
Sep 29 13:30:52 crc kubenswrapper[4611]: I0929 13:30:52.536734 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2gvrs" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="registry-server" containerID="cri-o://403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f" gracePeriod=2
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.037487 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.132672 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-utilities\") pod \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") "
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.132753 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-catalog-content\") pod \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") "
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.132785 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc75m\" (UniqueName: \"kubernetes.io/projected/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-kube-api-access-cc75m\") pod \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\" (UID: \"bd9f807b-92d9-4f46-a73c-b539eb8d67c8\") "
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.135675 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-utilities" (OuterVolumeSpecName: "utilities") pod "bd9f807b-92d9-4f46-a73c-b539eb8d67c8" (UID: "bd9f807b-92d9-4f46-a73c-b539eb8d67c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.141830 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-kube-api-access-cc75m" (OuterVolumeSpecName: "kube-api-access-cc75m") pod "bd9f807b-92d9-4f46-a73c-b539eb8d67c8" (UID: "bd9f807b-92d9-4f46-a73c-b539eb8d67c8"). InnerVolumeSpecName "kube-api-access-cc75m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.154295 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd9f807b-92d9-4f46-a73c-b539eb8d67c8" (UID: "bd9f807b-92d9-4f46-a73c-b539eb8d67c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.238315 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.238579 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.238688 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc75m\" (UniqueName: \"kubernetes.io/projected/bd9f807b-92d9-4f46-a73c-b539eb8d67c8-kube-api-access-cc75m\") on node \"crc\" DevicePath \"\""
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.440517 4611 generic.go:334] "Generic (PLEG): container finished" podID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerID="403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f" exitCode=0
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.440571 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerDied","Data":"403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f"}
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.440591 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gvrs"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.440611 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gvrs" event={"ID":"bd9f807b-92d9-4f46-a73c-b539eb8d67c8","Type":"ContainerDied","Data":"c3e6f7a7ce8899f7b3ed5806bf53e781a060bc259fe2d2b07645d3291a6faed6"}
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.440640 4611 scope.go:117] "RemoveContainer" containerID="403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.475979 4611 scope.go:117] "RemoveContainer" containerID="501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.494986 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gvrs"]
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.501210 4611 scope.go:117] "RemoveContainer" containerID="3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.506096 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gvrs"]
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.559737 4611 scope.go:117] "RemoveContainer" containerID="403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f"
Sep 29 13:30:53 crc kubenswrapper[4611]: E0929 13:30:53.560124 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f\": container with ID starting with 403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f not found: ID does not exist" containerID="403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.560150 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f"} err="failed to get container status \"403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f\": rpc error: code = NotFound desc = could not find container \"403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f\": container with ID starting with 403073f2afd4a202148dae28acac4f8ff73a17a8324084b186db33ea125f6a0f not found: ID does not exist"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.560171 4611 scope.go:117] "RemoveContainer" containerID="501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84"
Sep 29 13:30:53 crc kubenswrapper[4611]: E0929 13:30:53.560605 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84\": container with ID starting with 501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84 not found: ID does not exist" containerID="501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.560651 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84"} err="failed to get container status \"501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84\": rpc error: code = NotFound desc = could not find container \"501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84\": container with ID starting with 501aa64ff28c851ea8d4277a71bc5f7e2776c85d1fae401b1c0c71fc5e2a6f84 not found: ID does not exist"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.560671 4611 scope.go:117] "RemoveContainer" containerID="3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e"
Sep 29 13:30:53 crc kubenswrapper[4611]: E0929 13:30:53.560927 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e\": container with ID starting with 3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e not found: ID does not exist" containerID="3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.560942 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e"} err="failed to get container status \"3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e\": rpc error: code = NotFound desc = could not find container \"3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e\": container with ID starting with 3aec6495aa7303e88c8e33e9958b819ef5b6ef364bbbce994ac2c07c7086848e not found: ID does not exist"
Sep 29 13:30:53 crc kubenswrapper[4611]: I0929 13:30:53.752302 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" path="/var/lib/kubelet/pods/bd9f807b-92d9-4f46-a73c-b539eb8d67c8/volumes"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416018 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"]
Sep 29 13:30:54 crc kubenswrapper[4611]: E0929 13:30:54.416376 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="registry-server"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416396 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="registry-server"
Sep 29 13:30:54 crc kubenswrapper[4611]: E0929 13:30:54.416414 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="extract-content"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416422 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="extract-content"
Sep 29 13:30:54 crc kubenswrapper[4611]: E0929 13:30:54.416436 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="extract-utilities"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416442 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="extract-utilities"
Sep 29 13:30:54 crc kubenswrapper[4611]: E0929 13:30:54.416459 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="extract-content"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416467 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="extract-content"
Sep 29 13:30:54 crc kubenswrapper[4611]: E0929 13:30:54.416492 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="extract-utilities"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416499 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="extract-utilities"
Sep 29 13:30:54 crc kubenswrapper[4611]: E0929 13:30:54.416517 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416526 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416791 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fdecdcf-d829-44a9-9632-0c47d35cb04f" containerName="registry-server"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.416808 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd9f807b-92d9-4f46-a73c-b539eb8d67c8" containerName="registry-server"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.418061 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.453705 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"]
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.568718 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95c1a320-ab1a-4f16-ae1b-1cb890574834-apiservice-cert\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.568858 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95c1a320-ab1a-4f16-ae1b-1cb890574834-webhook-cert\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.569012 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwwjr\" (UniqueName: \"kubernetes.io/projected/95c1a320-ab1a-4f16-ae1b-1cb890574834-kube-api-access-vwwjr\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.671190 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95c1a320-ab1a-4f16-ae1b-1cb890574834-webhook-cert\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.671288 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwwjr\" (UniqueName: \"kubernetes.io/projected/95c1a320-ab1a-4f16-ae1b-1cb890574834-kube-api-access-vwwjr\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.671408 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95c1a320-ab1a-4f16-ae1b-1cb890574834-apiservice-cert\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.677225 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95c1a320-ab1a-4f16-ae1b-1cb890574834-apiservice-cert\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.679197 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95c1a320-ab1a-4f16-ae1b-1cb890574834-webhook-cert\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.694252 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwwjr\" (UniqueName: \"kubernetes.io/projected/95c1a320-ab1a-4f16-ae1b-1cb890574834-kube-api-access-vwwjr\") pod \"watcher-operator-controller-manager-659c84d44d-kp6fw\" (UID: \"95c1a320-ab1a-4f16-ae1b-1cb890574834\") " pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"
Sep 29 13:30:54 crc kubenswrapper[4611]: I0929 13:30:54.742353 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" Sep 29 13:30:55 crc kubenswrapper[4611]: I0929 13:30:55.217154 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw"] Sep 29 13:30:55 crc kubenswrapper[4611]: I0929 13:30:55.508733 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" event={"ID":"95c1a320-ab1a-4f16-ae1b-1cb890574834","Type":"ContainerStarted","Data":"01e42796420c09ebd48b8ce288a6e3d6032145e9a754aeb09fa647e650f692f2"} Sep 29 13:30:55 crc kubenswrapper[4611]: I0929 13:30:55.508783 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" event={"ID":"95c1a320-ab1a-4f16-ae1b-1cb890574834","Type":"ContainerStarted","Data":"d4be89d6dc28664b8220a15014c443ffd67a1c85dfc89cb8d9d4a7faf0ce0745"} Sep 29 13:30:56 crc kubenswrapper[4611]: I0929 13:30:56.522126 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" event={"ID":"95c1a320-ab1a-4f16-ae1b-1cb890574834","Type":"ContainerStarted","Data":"bd368a4126d0bae6f25cb4d097dd510b1c3522f7d5ad32aab0461673f281ff26"} Sep 29 13:30:56 crc kubenswrapper[4611]: I0929 13:30:56.523690 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" Sep 29 13:30:56 crc kubenswrapper[4611]: I0929 13:30:56.543522 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" podStartSLOduration=2.543501122 podStartE2EDuration="2.543501122s" podCreationTimestamp="2025-09-29 13:30:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:30:56.537304673 +0000 UTC m=+3043.428824309" watchObservedRunningTime="2025-09-29 13:30:56.543501122 +0000 UTC m=+3043.435020728" Sep 29 13:31:03 crc kubenswrapper[4611]: I0929 13:31:03.745734 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:31:03 crc kubenswrapper[4611]: E0929 13:31:03.749515 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:31:04 crc kubenswrapper[4611]: I0929 13:31:04.748325 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-659c84d44d-kp6fw" Sep 29 13:31:04 crc kubenswrapper[4611]: I0929 13:31:04.803706 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"] Sep 29 13:31:04 crc kubenswrapper[4611]: I0929 13:31:04.803948 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="manager" 
containerID="cri-o://158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee" gracePeriod=10 Sep 29 13:31:04 crc kubenswrapper[4611]: I0929 13:31:04.804075 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="kube-rbac-proxy" containerID="cri-o://4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333" gracePeriod=10 Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.371047 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.472331 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g44r7\" (UniqueName: \"kubernetes.io/projected/babeead3-8fa0-433e-aa7b-bed713216821-kube-api-access-g44r7\") pod \"babeead3-8fa0-433e-aa7b-bed713216821\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.472504 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-apiservice-cert\") pod \"babeead3-8fa0-433e-aa7b-bed713216821\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.472594 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-webhook-cert\") pod \"babeead3-8fa0-433e-aa7b-bed713216821\" (UID: \"babeead3-8fa0-433e-aa7b-bed713216821\") " Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.478329 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "babeead3-8fa0-433e-aa7b-bed713216821" (UID: "babeead3-8fa0-433e-aa7b-bed713216821"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.479490 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "babeead3-8fa0-433e-aa7b-bed713216821" (UID: "babeead3-8fa0-433e-aa7b-bed713216821"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.479736 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/babeead3-8fa0-433e-aa7b-bed713216821-kube-api-access-g44r7" (OuterVolumeSpecName: "kube-api-access-g44r7") pod "babeead3-8fa0-433e-aa7b-bed713216821" (UID: "babeead3-8fa0-433e-aa7b-bed713216821"). InnerVolumeSpecName "kube-api-access-g44r7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.577353 4611 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.577408 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g44r7\" (UniqueName: \"kubernetes.io/projected/babeead3-8fa0-433e-aa7b-bed713216821-kube-api-access-g44r7\") on node \"crc\" DevicePath \"\"" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.577425 4611 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/babeead3-8fa0-433e-aa7b-bed713216821-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603548 4611 generic.go:334] "Generic (PLEG): container finished" podID="babeead3-8fa0-433e-aa7b-bed713216821" containerID="4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333" exitCode=0 Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603573 4611 generic.go:334] "Generic (PLEG): container finished" podID="babeead3-8fa0-433e-aa7b-bed713216821" containerID="158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee" exitCode=0 Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603592 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" event={"ID":"babeead3-8fa0-433e-aa7b-bed713216821","Type":"ContainerDied","Data":"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333"} Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603618 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" event={"ID":"babeead3-8fa0-433e-aa7b-bed713216821","Type":"ContainerDied","Data":"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee"} Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603644 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" event={"ID":"babeead3-8fa0-433e-aa7b-bed713216821","Type":"ContainerDied","Data":"ab9721ff59996715756c6dc9b7bf788a973b8ea4f155fb4493ed56bb59b7c886"} Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603658 4611 scope.go:117] "RemoveContainer" containerID="4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.603780 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.636990 4611 scope.go:117] "RemoveContainer" containerID="158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.641655 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"] Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.651307 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6598b66547-cl44m"] Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.656468 4611 scope.go:117] "RemoveContainer" containerID="4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333" Sep 29 13:31:05 crc kubenswrapper[4611]: E0929 13:31:05.657327 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333\": container with ID starting with 4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333 not found: ID does not exist" containerID="4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.657362 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333"} err="failed to get container status \"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333\": rpc error: code = NotFound desc = could not find container \"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333\": container with ID starting with 4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333 not found: ID does not exist" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.657388 4611 scope.go:117] "RemoveContainer" containerID="158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee" Sep 29 13:31:05 crc kubenswrapper[4611]: E0929 13:31:05.657697 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee\": container with ID starting with 158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee not found: ID does not exist" containerID="158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.657851 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee"} err="failed to get container status \"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee\": rpc error: code = NotFound desc = could not find container \"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee\": container with ID starting with 158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee not found: ID does not exist" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.657951 4611 scope.go:117] "RemoveContainer" containerID="4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.658290 4611 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333"} err="failed to get container status \"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333\": rpc error: code = NotFound desc = could not find container \"4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333\": container with ID starting with 4dec7b9d34d200ec320830aa9c449c70df6fa4a4581764d638b16b9e4632a333 not found: ID does not exist" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.658315 4611 scope.go:117] "RemoveContainer" containerID="158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.658505 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee"} err="failed to get container status \"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee\": rpc error: code = NotFound desc = could not find container \"158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee\": container with ID starting with 158bb8e1a98242e4e3ab806f8857a3ba8883041dab4595f7692346f70919beee not found: ID does not exist" Sep 29 13:31:05 crc kubenswrapper[4611]: I0929 13:31:05.746552 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="babeead3-8fa0-433e-aa7b-bed713216821" path="/var/lib/kubelet/pods/babeead3-8fa0-433e-aa7b-bed713216821/volumes" Sep 29 13:31:14 crc kubenswrapper[4611]: I0929 13:31:14.737063 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:31:14 crc kubenswrapper[4611]: E0929 13:31:14.737766 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.199004 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-r2rqs"] Sep 29 13:31:22 crc kubenswrapper[4611]: E0929 13:31:22.199850 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="manager" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.199862 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="manager" Sep 29 13:31:22 crc kubenswrapper[4611]: E0929 13:31:22.199881 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="kube-rbac-proxy" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.199888 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="kube-rbac-proxy" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.200108 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="kube-rbac-proxy" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.200121 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="babeead3-8fa0-433e-aa7b-bed713216821" containerName="manager" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.200829 
4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.215414 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-r2rqs"] Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.307186 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjfz8\" (UniqueName: \"kubernetes.io/projected/64f5bc16-a580-40ef-9e8a-506fc4714c2b-kube-api-access-gjfz8\") pod \"watcher-db-create-r2rqs\" (UID: \"64f5bc16-a580-40ef-9e8a-506fc4714c2b\") " pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.409242 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjfz8\" (UniqueName: \"kubernetes.io/projected/64f5bc16-a580-40ef-9e8a-506fc4714c2b-kube-api-access-gjfz8\") pod \"watcher-db-create-r2rqs\" (UID: \"64f5bc16-a580-40ef-9e8a-506fc4714c2b\") " pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.435445 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjfz8\" (UniqueName: \"kubernetes.io/projected/64f5bc16-a580-40ef-9e8a-506fc4714c2b-kube-api-access-gjfz8\") pod \"watcher-db-create-r2rqs\" (UID: \"64f5bc16-a580-40ef-9e8a-506fc4714c2b\") " pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:22 crc kubenswrapper[4611]: I0929 13:31:22.592560 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:23 crc kubenswrapper[4611]: I0929 13:31:23.093417 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-r2rqs"] Sep 29 13:31:23 crc kubenswrapper[4611]: I0929 13:31:23.780276 4611 generic.go:334] "Generic (PLEG): container finished" podID="64f5bc16-a580-40ef-9e8a-506fc4714c2b" containerID="27ce1890bb10c0b4cb526ea94c45495f92f3b1d7d232bf8fa5bf31c18dbedc8a" exitCode=0 Sep 29 13:31:23 crc kubenswrapper[4611]: I0929 13:31:23.780581 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-r2rqs" event={"ID":"64f5bc16-a580-40ef-9e8a-506fc4714c2b","Type":"ContainerDied","Data":"27ce1890bb10c0b4cb526ea94c45495f92f3b1d7d232bf8fa5bf31c18dbedc8a"} Sep 29 13:31:23 crc kubenswrapper[4611]: I0929 13:31:23.780639 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-r2rqs" event={"ID":"64f5bc16-a580-40ef-9e8a-506fc4714c2b","Type":"ContainerStarted","Data":"a31b555bee487974fd235a506acd68e359e3acaa81350dc64f11e4ddde1df779"} Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.241068 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.402155 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjfz8\" (UniqueName: \"kubernetes.io/projected/64f5bc16-a580-40ef-9e8a-506fc4714c2b-kube-api-access-gjfz8\") pod \"64f5bc16-a580-40ef-9e8a-506fc4714c2b\" (UID: \"64f5bc16-a580-40ef-9e8a-506fc4714c2b\") " Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.409334 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64f5bc16-a580-40ef-9e8a-506fc4714c2b-kube-api-access-gjfz8" (OuterVolumeSpecName: "kube-api-access-gjfz8") pod "64f5bc16-a580-40ef-9e8a-506fc4714c2b" (UID: "64f5bc16-a580-40ef-9e8a-506fc4714c2b"). InnerVolumeSpecName "kube-api-access-gjfz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.504240 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjfz8\" (UniqueName: \"kubernetes.io/projected/64f5bc16-a580-40ef-9e8a-506fc4714c2b-kube-api-access-gjfz8\") on node \"crc\" DevicePath \"\"" Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.804789 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-r2rqs" event={"ID":"64f5bc16-a580-40ef-9e8a-506fc4714c2b","Type":"ContainerDied","Data":"a31b555bee487974fd235a506acd68e359e3acaa81350dc64f11e4ddde1df779"} Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.804842 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a31b555bee487974fd235a506acd68e359e3acaa81350dc64f11e4ddde1df779" Sep 29 13:31:25 crc kubenswrapper[4611]: I0929 13:31:25.804915 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-r2rqs" Sep 29 13:31:26 crc kubenswrapper[4611]: I0929 13:31:26.736652 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:31:26 crc kubenswrapper[4611]: E0929 13:31:26.737923 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.225531 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-cc47-account-create-bqg97"] Sep 29 13:31:32 crc kubenswrapper[4611]: E0929 13:31:32.226468 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64f5bc16-a580-40ef-9e8a-506fc4714c2b" containerName="mariadb-database-create" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.226480 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="64f5bc16-a580-40ef-9e8a-506fc4714c2b" containerName="mariadb-database-create" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.226680 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="64f5bc16-a580-40ef-9e8a-506fc4714c2b" containerName="mariadb-database-create" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.227340 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.229999 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.237080 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-cc47-account-create-bqg97"] Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.331516 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7fhc\" (UniqueName: \"kubernetes.io/projected/dc37efcb-2804-4a6f-a41e-82cdc2c608b9-kube-api-access-r7fhc\") pod \"watcher-cc47-account-create-bqg97\" (UID: \"dc37efcb-2804-4a6f-a41e-82cdc2c608b9\") " pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.435796 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7fhc\" (UniqueName: \"kubernetes.io/projected/dc37efcb-2804-4a6f-a41e-82cdc2c608b9-kube-api-access-r7fhc\") pod \"watcher-cc47-account-create-bqg97\" (UID: \"dc37efcb-2804-4a6f-a41e-82cdc2c608b9\") " pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.453369 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7fhc\" (UniqueName: \"kubernetes.io/projected/dc37efcb-2804-4a6f-a41e-82cdc2c608b9-kube-api-access-r7fhc\") pod \"watcher-cc47-account-create-bqg97\" (UID: \"dc37efcb-2804-4a6f-a41e-82cdc2c608b9\") " pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:32 crc kubenswrapper[4611]: I0929 13:31:32.597890 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:33 crc kubenswrapper[4611]: I0929 13:31:33.062989 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-cc47-account-create-bqg97"] Sep 29 13:31:33 crc kubenswrapper[4611]: I0929 13:31:33.886293 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-cc47-account-create-bqg97" event={"ID":"dc37efcb-2804-4a6f-a41e-82cdc2c608b9","Type":"ContainerStarted","Data":"141d3f8a936186d819ca527dfc3ef9a957ba86c9bac4450d8b1c282ee893de8d"} Sep 29 13:31:33 crc kubenswrapper[4611]: I0929 13:31:33.886603 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-cc47-account-create-bqg97" event={"ID":"dc37efcb-2804-4a6f-a41e-82cdc2c608b9","Type":"ContainerStarted","Data":"3994361f2fe8ae7db8d99c755ee7f6d2b1e24f5535822181728a4a618f3a5746"} Sep 29 13:31:33 crc kubenswrapper[4611]: I0929 13:31:33.907345 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-cc47-account-create-bqg97" podStartSLOduration=1.907323128 podStartE2EDuration="1.907323128s" podCreationTimestamp="2025-09-29 13:31:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:31:33.899138172 +0000 UTC m=+3080.790657788" watchObservedRunningTime="2025-09-29 13:31:33.907323128 +0000 UTC m=+3080.798842734" Sep 29 13:31:34 crc kubenswrapper[4611]: I0929 13:31:34.897917 4611 generic.go:334] "Generic (PLEG): container finished" podID="dc37efcb-2804-4a6f-a41e-82cdc2c608b9" containerID="141d3f8a936186d819ca527dfc3ef9a957ba86c9bac4450d8b1c282ee893de8d" exitCode=0 Sep 29 13:31:34 crc kubenswrapper[4611]: I0929 13:31:34.897969 4611 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-cc47-account-create-bqg97" event={"ID":"dc37efcb-2804-4a6f-a41e-82cdc2c608b9","Type":"ContainerDied","Data":"141d3f8a936186d819ca527dfc3ef9a957ba86c9bac4450d8b1c282ee893de8d"} Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.238287 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.313594 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7fhc\" (UniqueName: \"kubernetes.io/projected/dc37efcb-2804-4a6f-a41e-82cdc2c608b9-kube-api-access-r7fhc\") pod \"dc37efcb-2804-4a6f-a41e-82cdc2c608b9\" (UID: \"dc37efcb-2804-4a6f-a41e-82cdc2c608b9\") " Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.319670 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc37efcb-2804-4a6f-a41e-82cdc2c608b9-kube-api-access-r7fhc" (OuterVolumeSpecName: "kube-api-access-r7fhc") pod "dc37efcb-2804-4a6f-a41e-82cdc2c608b9" (UID: "dc37efcb-2804-4a6f-a41e-82cdc2c608b9"). InnerVolumeSpecName "kube-api-access-r7fhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.415822 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7fhc\" (UniqueName: \"kubernetes.io/projected/dc37efcb-2804-4a6f-a41e-82cdc2c608b9-kube-api-access-r7fhc\") on node \"crc\" DevicePath \"\"" Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.927545 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-cc47-account-create-bqg97" event={"ID":"dc37efcb-2804-4a6f-a41e-82cdc2c608b9","Type":"ContainerDied","Data":"3994361f2fe8ae7db8d99c755ee7f6d2b1e24f5535822181728a4a618f3a5746"} Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.927585 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3994361f2fe8ae7db8d99c755ee7f6d2b1e24f5535822181728a4a618f3a5746" Sep 29 13:31:36 crc kubenswrapper[4611]: I0929 13:31:36.927583 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-cc47-account-create-bqg97" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.572857 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-x2b9f"] Sep 29 13:31:37 crc kubenswrapper[4611]: E0929 13:31:37.573621 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc37efcb-2804-4a6f-a41e-82cdc2c608b9" containerName="mariadb-account-create" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.573636 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc37efcb-2804-4a6f-a41e-82cdc2c608b9" containerName="mariadb-account-create" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.573855 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc37efcb-2804-4a6f-a41e-82cdc2c608b9" containerName="mariadb-account-create" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.574529 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.576481 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-zs45j" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.577069 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.583053 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-x2b9f"] Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.639502 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44mtr\" (UniqueName: \"kubernetes.io/projected/707d4443-620a-49ae-98b1-c45b03681060-kube-api-access-44mtr\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.639567 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-combined-ca-bundle\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.639589 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-db-sync-config-data\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.639615 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-config-data\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.741192 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44mtr\" (UniqueName: \"kubernetes.io/projected/707d4443-620a-49ae-98b1-c45b03681060-kube-api-access-44mtr\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.741275 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-combined-ca-bundle\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.742301 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-db-sync-config-data\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.742388 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-config-data\") 
pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.750238 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-combined-ca-bundle\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.751299 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-config-data\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.751482 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-db-sync-config-data\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.765157 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44mtr\" (UniqueName: \"kubernetes.io/projected/707d4443-620a-49ae-98b1-c45b03681060-kube-api-access-44mtr\") pod \"watcher-db-sync-x2b9f\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.793438 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b7c6854c4-jrwd9"] Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.797013 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.846428 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-scripts\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.846652 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs7kj\" (UniqueName: \"kubernetes.io/projected/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-kube-api-access-fs7kj\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.846726 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-config-data\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.846810 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-combined-ca-bundle\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.846906 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-horizon-tls-certs\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.846984 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-logs\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.847142 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-horizon-secret-key\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.889222 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b7c6854c4-jrwd9"] Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.902673 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949115 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-scripts\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949164 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs7kj\" (UniqueName: \"kubernetes.io/projected/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-kube-api-access-fs7kj\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949190 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-config-data\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949295 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-combined-ca-bundle\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949342 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-horizon-tls-certs\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949361 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-logs\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.949456 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-horizon-secret-key\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.955291 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-config-data\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.955771 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-scripts\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.956263 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-logs\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.957065 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-horizon-secret-key\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.966249 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-horizon-tls-certs\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.970096 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-combined-ca-bundle\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:37 crc kubenswrapper[4611]: I0929 13:31:37.977038 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs7kj\" (UniqueName: \"kubernetes.io/projected/c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a-kube-api-access-fs7kj\") pod \"horizon-7b7c6854c4-jrwd9\" (UID: \"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a\") " pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:38 crc kubenswrapper[4611]: I0929 13:31:38.194007 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:38 crc kubenswrapper[4611]: I0929 13:31:38.605820 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-x2b9f"] Sep 29 13:31:38 crc kubenswrapper[4611]: I0929 13:31:38.838510 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b7c6854c4-jrwd9"] Sep 29 13:31:38 crc kubenswrapper[4611]: W0929 13:31:38.844999 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0b68cce_e4c2_4291_bbc5_0d096d1e0b8a.slice/crio-18702d1fc35b9eee63b871ed24a784e3148a22284ea98e7db2b2086a8a13d9bc WatchSource:0}: Error finding container 18702d1fc35b9eee63b871ed24a784e3148a22284ea98e7db2b2086a8a13d9bc: Status 404 returned error can't find the container with id 18702d1fc35b9eee63b871ed24a784e3148a22284ea98e7db2b2086a8a13d9bc Sep 29 13:31:38 crc kubenswrapper[4611]: I0929 13:31:38.967041 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b7c6854c4-jrwd9" event={"ID":"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a","Type":"ContainerStarted","Data":"18702d1fc35b9eee63b871ed24a784e3148a22284ea98e7db2b2086a8a13d9bc"} Sep 29 13:31:38 crc kubenswrapper[4611]: I0929 13:31:38.968823 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-x2b9f" event={"ID":"707d4443-620a-49ae-98b1-c45b03681060","Type":"ContainerStarted","Data":"60423cb1c0a3cd01c22253690caca27b3aa98f3e419cd0287197aa6a14b913d6"} Sep 29 13:31:39 crc kubenswrapper[4611]: I0929 13:31:39.737983 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:31:39 crc kubenswrapper[4611]: E0929 13:31:39.738531 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:31:39 crc kubenswrapper[4611]: I0929 13:31:39.981139 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b7c6854c4-jrwd9" event={"ID":"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a","Type":"ContainerStarted","Data":"90a4d87c317510976be1c397c9090e0296c934958b55075a2fa632f0c4eaaed1"} Sep 29 13:31:39 crc kubenswrapper[4611]: I0929 13:31:39.981190 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b7c6854c4-jrwd9" event={"ID":"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a","Type":"ContainerStarted","Data":"7b789f88b0ea7ce33e1fbfad30e2f74239bbb72516194e00dcf7e1dae8cb820c"} Sep 29 13:31:40 crc kubenswrapper[4611]: I0929 13:31:40.010066 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b7c6854c4-jrwd9" podStartSLOduration=3.010041089 podStartE2EDuration="3.010041089s" podCreationTimestamp="2025-09-29 13:31:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:31:40.004085027 +0000 UTC m=+3086.895604633" watchObservedRunningTime="2025-09-29 13:31:40.010041089 +0000 UTC m=+3086.901560695" Sep 29 13:31:48 crc kubenswrapper[4611]: I0929 13:31:48.195041 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:48 crc kubenswrapper[4611]: I0929 13:31:48.195580 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:31:51 crc kubenswrapper[4611]: I0929 13:31:51.735989 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:31:51 crc kubenswrapper[4611]: E0929 13:31:51.736774 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:31:53 crc kubenswrapper[4611]: I0929 13:31:53.148685 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-x2b9f" event={"ID":"707d4443-620a-49ae-98b1-c45b03681060","Type":"ContainerStarted","Data":"3aba9c22096497c5d73f348dfcce69511854a20dd59b16f1d9ba3e8bcb59a691"} Sep 29 13:31:53 crc kubenswrapper[4611]: I0929 13:31:53.167735 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-x2b9f" podStartSLOduration=2.057437255 podStartE2EDuration="16.167715535s" podCreationTimestamp="2025-09-29 13:31:37 +0000 UTC" firstStartedPulling="2025-09-29 13:31:38.615876993 +0000 UTC m=+3085.507396599" lastFinishedPulling="2025-09-29 13:31:52.726155273 +0000 UTC m=+3099.617674879" observedRunningTime="2025-09-29 13:31:53.163480763 +0000 UTC m=+3100.055000369" watchObservedRunningTime="2025-09-29 13:31:53.167715535 +0000 UTC m=+3100.059235141" Sep 29 13:31:58 crc kubenswrapper[4611]: I0929 13:31:58.197918 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b7c6854c4-jrwd9" podUID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.6:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.6:8443: connect: connection refused" Sep 29 13:32:02 crc kubenswrapper[4611]: I0929 13:32:02.738546 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:32:02 crc kubenswrapper[4611]: E0929 13:32:02.739555 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:32:04 crc kubenswrapper[4611]: I0929 13:32:04.250127 4611 generic.go:334] "Generic (PLEG): container finished" podID="707d4443-620a-49ae-98b1-c45b03681060" containerID="3aba9c22096497c5d73f348dfcce69511854a20dd59b16f1d9ba3e8bcb59a691" exitCode=0 Sep 29 13:32:04 crc kubenswrapper[4611]: I0929 13:32:04.250395 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-x2b9f" event={"ID":"707d4443-620a-49ae-98b1-c45b03681060","Type":"ContainerDied","Data":"3aba9c22096497c5d73f348dfcce69511854a20dd59b16f1d9ba3e8bcb59a691"} Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.681368 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.823319 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-db-sync-config-data\") pod \"707d4443-620a-49ae-98b1-c45b03681060\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.823723 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44mtr\" (UniqueName: \"kubernetes.io/projected/707d4443-620a-49ae-98b1-c45b03681060-kube-api-access-44mtr\") pod \"707d4443-620a-49ae-98b1-c45b03681060\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.823900 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-config-data\") pod \"707d4443-620a-49ae-98b1-c45b03681060\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.823943 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-combined-ca-bundle\") pod \"707d4443-620a-49ae-98b1-c45b03681060\" (UID: \"707d4443-620a-49ae-98b1-c45b03681060\") " Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.830063 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "707d4443-620a-49ae-98b1-c45b03681060" (UID: "707d4443-620a-49ae-98b1-c45b03681060"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.845863 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707d4443-620a-49ae-98b1-c45b03681060-kube-api-access-44mtr" (OuterVolumeSpecName: "kube-api-access-44mtr") pod "707d4443-620a-49ae-98b1-c45b03681060" (UID: "707d4443-620a-49ae-98b1-c45b03681060"). InnerVolumeSpecName "kube-api-access-44mtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.857420 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "707d4443-620a-49ae-98b1-c45b03681060" (UID: "707d4443-620a-49ae-98b1-c45b03681060"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.890691 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-config-data" (OuterVolumeSpecName: "config-data") pod "707d4443-620a-49ae-98b1-c45b03681060" (UID: "707d4443-620a-49ae-98b1-c45b03681060"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.926993 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.927041 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.927056 4611 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/707d4443-620a-49ae-98b1-c45b03681060-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:05 crc kubenswrapper[4611]: I0929 13:32:05.927068 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44mtr\" (UniqueName: \"kubernetes.io/projected/707d4443-620a-49ae-98b1-c45b03681060-kube-api-access-44mtr\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.272708 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-x2b9f" event={"ID":"707d4443-620a-49ae-98b1-c45b03681060","Type":"ContainerDied","Data":"60423cb1c0a3cd01c22253690caca27b3aa98f3e419cd0287197aa6a14b913d6"} Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.272744 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60423cb1c0a3cd01c22253690caca27b3aa98f3e419cd0287197aa6a14b913d6" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.272801 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-x2b9f" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.644908 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 13:32:06 crc kubenswrapper[4611]: E0929 13:32:06.645325 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707d4443-620a-49ae-98b1-c45b03681060" containerName="watcher-db-sync" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.645345 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="707d4443-620a-49ae-98b1-c45b03681060" containerName="watcher-db-sync" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.645556 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="707d4443-620a-49ae-98b1-c45b03681060" containerName="watcher-db-sync" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.646218 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.652475 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.655405 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-zs45j" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.660666 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.741472 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.741575 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59308f5-6c15-48a5-b730-1543849afa05-logs\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.741611 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.741646 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzdj6\" (UniqueName: \"kubernetes.io/projected/f59308f5-6c15-48a5-b730-1543849afa05-kube-api-access-pzdj6\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.741666 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.749547 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.751314 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.757571 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.777289 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.821593 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.823336 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.828072 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.845835 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-config-data\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.845886 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.845930 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.845969 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd7h6\" (UniqueName: \"kubernetes.io/projected/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-kube-api-access-pd7h6\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.846006 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59308f5-6c15-48a5-b730-1543849afa05-logs\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.846044 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.846060 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzdj6\" (UniqueName: \"kubernetes.io/projected/f59308f5-6c15-48a5-b730-1543849afa05-kube-api-access-pzdj6\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.846077 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.846097 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-logs\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.848405 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59308f5-6c15-48a5-b730-1543849afa05-logs\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.852202 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.853246 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.858435 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59308f5-6c15-48a5-b730-1543849afa05-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.878437 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzdj6\" (UniqueName: \"kubernetes.io/projected/f59308f5-6c15-48a5-b730-1543849afa05-kube-api-access-pzdj6\") pod \"watcher-decision-engine-0\" (UID: \"f59308f5-6c15-48a5-b730-1543849afa05\") " pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.879312 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.947880 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-config-data\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.947944 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr7q4\" (UniqueName: \"kubernetes.io/projected/1d3c3ace-8166-40ee-aee1-76bca836555e-kube-api-access-rr7q4\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.947968 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.947993 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.948028 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d3c3ace-8166-40ee-aee1-76bca836555e-logs\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.948042 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.948102 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd7h6\" (UniqueName: \"kubernetes.io/projected/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-kube-api-access-pd7h6\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.948134 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-config-data\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.948182 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-logs\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.948609 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-logs\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.951085 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.952131 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-config-data\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.969060 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Sep 29 13:32:06 crc kubenswrapper[4611]: I0929 13:32:06.989068 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd7h6\" (UniqueName: \"kubernetes.io/projected/6c448e57-326a-4c7d-9e44-be5cf8afd7ea-kube-api-access-pd7h6\") pod \"watcher-applier-0\" (UID: \"6c448e57-326a-4c7d-9e44-be5cf8afd7ea\") " pod="openstack/watcher-applier-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.050331 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-config-data\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.050489 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr7q4\" (UniqueName: \"kubernetes.io/projected/1d3c3ace-8166-40ee-aee1-76bca836555e-kube-api-access-rr7q4\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.050517 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.050544 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d3c3ace-8166-40ee-aee1-76bca836555e-logs\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.050559 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.051397 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d3c3ace-8166-40ee-aee1-76bca836555e-logs\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.055474 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-config-data\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.058999 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.063873 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3c3ace-8166-40ee-aee1-76bca836555e-combined-ca-bundle\") pod \"watcher-api-0\" (UID: 
\"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.065374 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.092230 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr7q4\" (UniqueName: \"kubernetes.io/projected/1d3c3ace-8166-40ee-aee1-76bca836555e-kube-api-access-rr7q4\") pod \"watcher-api-0\" (UID: \"1d3c3ace-8166-40ee-aee1-76bca836555e\") " pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.141122 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 29 13:32:07 crc kubenswrapper[4611]: I0929 13:32:07.732778 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 13:32:08 crc kubenswrapper[4611]: I0929 13:32:08.014756 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 13:32:08 crc kubenswrapper[4611]: I0929 13:32:08.159575 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 13:32:08 crc kubenswrapper[4611]: I0929 13:32:08.195798 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b7c6854c4-jrwd9" podUID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.6:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.6:8443: connect: connection refused" Sep 29 13:32:08 crc kubenswrapper[4611]: I0929 13:32:08.303786 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"1d3c3ace-8166-40ee-aee1-76bca836555e","Type":"ContainerStarted","Data":"7ff21f81f0b4fe8286d67889db039e47264c66a2ee3d8dac4bf15b494a03d44a"} Sep 29 13:32:08 crc kubenswrapper[4611]: I0929 13:32:08.306373 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"6c448e57-326a-4c7d-9e44-be5cf8afd7ea","Type":"ContainerStarted","Data":"47a27c7dcbca1a0437dda626d37ac9617f8482fad68c47e483676bb103f85246"} Sep 29 13:32:08 crc kubenswrapper[4611]: I0929 13:32:08.310572 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f59308f5-6c15-48a5-b730-1543849afa05","Type":"ContainerStarted","Data":"1bed5a50b1a7e78d587bd8f67ae235e66dc77d3130dfb8d56c0dd58acde5b4ef"} Sep 29 13:32:09 crc kubenswrapper[4611]: I0929 13:32:09.321220 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"1d3c3ace-8166-40ee-aee1-76bca836555e","Type":"ContainerStarted","Data":"3f36bb2822c4964d20c5735d9fd6bf68fe99b60b6f28a5b63aac48f246714614"} Sep 29 13:32:09 crc kubenswrapper[4611]: I0929 13:32:09.323183 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 13:32:09 crc kubenswrapper[4611]: I0929 13:32:09.323210 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"1d3c3ace-8166-40ee-aee1-76bca836555e","Type":"ContainerStarted","Data":"2fdd5c4e206b93c917551e68c9425eca1978d9861e84c7931a6d106a48f9e1bb"} Sep 29 13:32:12 crc kubenswrapper[4611]: I0929 13:32:12.141686 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 13:32:12 crc kubenswrapper[4611]: I0929 13:32:12.142130 4611 
Sep 29 13:32:13 crc kubenswrapper[4611]: I0929 13:32:13.362438 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"6c448e57-326a-4c7d-9e44-be5cf8afd7ea","Type":"ContainerStarted","Data":"aae0c7627d85ba7df2146a2024dec3dc6b4c0e498828eb1f90aea8f31704dd5a"}
Sep 29 13:32:13 crc kubenswrapper[4611]: I0929 13:32:13.364057 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f59308f5-6c15-48a5-b730-1543849afa05","Type":"ContainerStarted","Data":"6c3aa5a3f74a0f7114df172ac7a263bf6cd14a666dcb7797202191d17c6c6832"}
Sep 29 13:32:13 crc kubenswrapper[4611]: I0929 13:32:13.774750 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=7.774726836 podStartE2EDuration="7.774726836s" podCreationTimestamp="2025-09-29 13:32:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:32:09.348393908 +0000 UTC m=+3116.239913534" watchObservedRunningTime="2025-09-29 13:32:13.774726836 +0000 UTC m=+3120.666246442"
Sep 29 13:32:14 crc kubenswrapper[4611]: I0929 13:32:14.363834 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="1d3c3ace-8166-40ee-aee1-76bca836555e" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.1.9:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:32:14 crc kubenswrapper[4611]: I0929 13:32:14.396029 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=3.318682899 podStartE2EDuration="8.395990863s" podCreationTimestamp="2025-09-29 13:32:06 +0000 UTC" firstStartedPulling="2025-09-29 13:32:07.734913382 +0000 UTC m=+3114.626432988" lastFinishedPulling="2025-09-29 13:32:12.812221346 +0000 UTC m=+3119.703740952" observedRunningTime="2025-09-29 13:32:14.395992483 +0000 UTC m=+3121.287512099" watchObservedRunningTime="2025-09-29 13:32:14.395990863 +0000 UTC m=+3121.287510469"
Sep 29 13:32:14 crc kubenswrapper[4611]: I0929 13:32:14.417845 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=3.641265534 podStartE2EDuration="8.417822844s" podCreationTimestamp="2025-09-29 13:32:06 +0000 UTC" firstStartedPulling="2025-09-29 13:32:08.042261036 +0000 UTC m=+3114.933780642" lastFinishedPulling="2025-09-29 13:32:12.818818346 +0000 UTC m=+3119.710337952" observedRunningTime="2025-09-29 13:32:14.413297793 +0000 UTC m=+3121.304817399" watchObservedRunningTime="2025-09-29 13:32:14.417822844 +0000 UTC m=+3121.309342450"
Sep 29 13:32:16 crc kubenswrapper[4611]: I0929 13:32:16.736243 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c"
Sep 29 13:32:16 crc kubenswrapper[4611]: E0929 13:32:16.736836 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:32:16 crc kubenswrapper[4611]: I0929 13:32:16.969280 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.004297 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.066024 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.066070 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.097953 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.144977 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.145074 4611 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.210083 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/watcher-api-0" podUID="1d3c3ace-8166-40ee-aee1-76bca836555e" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.1.9:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.397051 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.430265 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0"
Sep 29 13:32:17 crc kubenswrapper[4611]: I0929 13:32:17.447098 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Sep 29 13:32:18 crc kubenswrapper[4611]: I0929 13:32:18.154390 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/watcher-api-0" podUID="1d3c3ace-8166-40ee-aee1-76bca836555e" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.1.9:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:32:18 crc kubenswrapper[4611]: I0929 13:32:18.154572 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0"
Sep 29 13:32:23 crc kubenswrapper[4611]: I0929 13:32:23.200945 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b7c6854c4-jrwd9" podUID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.6:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 13:32:23 crc kubenswrapper[4611]: I0929 13:32:23.202063 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b7c6854c4-jrwd9"
Sep 29 13:32:23 crc kubenswrapper[4611]: I0929 13:32:23.203762 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"90a4d87c317510976be1c397c9090e0296c934958b55075a2fa632f0c4eaaed1"} pod="openstack/horizon-7b7c6854c4-jrwd9" containerMessage="Container horizon failed startup probe, will be restarted"
Sep 29 13:32:23 crc kubenswrapper[4611]: I0929 13:32:23.203830 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b7c6854c4-jrwd9" podUID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerName="horizon" containerID="cri-o://90a4d87c317510976be1c397c9090e0296c934958b55075a2fa632f0c4eaaed1" gracePeriod=30
Sep 29 13:32:27 crc kubenswrapper[4611]: I0929 13:32:27.147978 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0"
Sep 29 13:32:27 crc kubenswrapper[4611]: I0929 13:32:27.154003 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0"
Sep 29 13:32:28 crc kubenswrapper[4611]: I0929 13:32:28.538306 4611 generic.go:334] "Generic (PLEG): container finished" podID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerID="90a4d87c317510976be1c397c9090e0296c934958b55075a2fa632f0c4eaaed1" exitCode=0
Sep 29 13:32:28 crc kubenswrapper[4611]: I0929 13:32:28.538582 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b7c6854c4-jrwd9" event={"ID":"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a","Type":"ContainerDied","Data":"90a4d87c317510976be1c397c9090e0296c934958b55075a2fa632f0c4eaaed1"}
Sep 29 13:32:29 crc kubenswrapper[4611]: I0929 13:32:29.555667 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b7c6854c4-jrwd9" event={"ID":"c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a","Type":"ContainerStarted","Data":"d7a0f2a2e0a672eaaec0668f08291f27e2eee6737e0f4244f623e6166794feca"}
Sep 29 13:32:29 crc kubenswrapper[4611]: I0929 13:32:29.979830 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 13:32:29 crc kubenswrapper[4611]: I0929 13:32:29.981303 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-central-agent" containerID="cri-o://bd6551d1fa5c534d534ad3bc12cd9c5c937482943902f3332d4f0b9f8862bdcd" gracePeriod=30
Sep 29 13:32:29 crc kubenswrapper[4611]: I0929 13:32:29.982007 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="proxy-httpd" containerID="cri-o://8f13094a4d1d946b9bd6f0beff37f0ef63278ddfaa19db88671e921a51ae36dd" gracePeriod=30
Sep 29 13:32:29 crc kubenswrapper[4611]: I0929 13:32:29.982094 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="sg-core" containerID="cri-o://0eb6fc18e2acf53303a64c9252161bed2f90f2b3ce2ddf74372d39a82bece954" gracePeriod=30
Sep 29 13:32:29 crc kubenswrapper[4611]: I0929 13:32:29.982164 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-notification-agent" containerID="cri-o://6a4291c400687a8859bca93b8773a6865f9de8532f23fe5ceddcf8dad9b2846f" gracePeriod=30
Sep 29 13:32:30 crc kubenswrapper[4611]: I0929 13:32:30.569875 4611 generic.go:334] "Generic (PLEG): container finished" podID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerID="8f13094a4d1d946b9bd6f0beff37f0ef63278ddfaa19db88671e921a51ae36dd" exitCode=0
Sep 29 13:32:30 crc kubenswrapper[4611]: I0929 13:32:30.569944 4611 generic.go:334] "Generic (PLEG): container finished" podID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerID="0eb6fc18e2acf53303a64c9252161bed2f90f2b3ce2ddf74372d39a82bece954" exitCode=2
containerID="0eb6fc18e2acf53303a64c9252161bed2f90f2b3ce2ddf74372d39a82bece954" exitCode=2 Sep 29 13:32:30 crc kubenswrapper[4611]: I0929 13:32:30.570043 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerDied","Data":"8f13094a4d1d946b9bd6f0beff37f0ef63278ddfaa19db88671e921a51ae36dd"} Sep 29 13:32:30 crc kubenswrapper[4611]: I0929 13:32:30.570101 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerDied","Data":"0eb6fc18e2acf53303a64c9252161bed2f90f2b3ce2ddf74372d39a82bece954"} Sep 29 13:32:31 crc kubenswrapper[4611]: I0929 13:32:31.582968 4611 generic.go:334] "Generic (PLEG): container finished" podID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerID="bd6551d1fa5c534d534ad3bc12cd9c5c937482943902f3332d4f0b9f8862bdcd" exitCode=0 Sep 29 13:32:31 crc kubenswrapper[4611]: I0929 13:32:31.583286 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerDied","Data":"bd6551d1fa5c534d534ad3bc12cd9c5c937482943902f3332d4f0b9f8862bdcd"} Sep 29 13:32:31 crc kubenswrapper[4611]: I0929 13:32:31.736016 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:32:31 crc kubenswrapper[4611]: E0929 13:32:31.736294 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:32:35 crc kubenswrapper[4611]: I0929 13:32:35.653481 4611 generic.go:334] "Generic (PLEG): container finished" podID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerID="6a4291c400687a8859bca93b8773a6865f9de8532f23fe5ceddcf8dad9b2846f" exitCode=0 Sep 29 13:32:35 crc kubenswrapper[4611]: I0929 13:32:35.653537 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerDied","Data":"6a4291c400687a8859bca93b8773a6865f9de8532f23fe5ceddcf8dad9b2846f"} Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.354279 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.507291 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-log-httpd\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.507549 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-config-data\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.507619 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-combined-ca-bundle\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.507744 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-ceilometer-tls-certs\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.507775 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-sg-core-conf-yaml\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.507849 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-scripts\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.508011 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-run-httpd\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.508082 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8bl6\" (UniqueName: \"kubernetes.io/projected/72b8cd6d-9dfa-483d-9634-09df584bf7ed-kube-api-access-f8bl6\") pod \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\" (UID: \"72b8cd6d-9dfa-483d-9634-09df584bf7ed\") " Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.543931 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.547416 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.611103 4611 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.611164 4611 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72b8cd6d-9dfa-483d-9634-09df584bf7ed-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.612030 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-scripts" (OuterVolumeSpecName: "scripts") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.623851 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.649751 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72b8cd6d-9dfa-483d-9634-09df584bf7ed-kube-api-access-f8bl6" (OuterVolumeSpecName: "kube-api-access-f8bl6") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "kube-api-access-f8bl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.656781 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.666140 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72b8cd6d-9dfa-483d-9634-09df584bf7ed","Type":"ContainerDied","Data":"4f3c5d454477ef0995659f62f2993f7c8c0489a6d34634d2af78357c11097ad4"} Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.666310 4611 scope.go:117] "RemoveContainer" containerID="8f13094a4d1d946b9bd6f0beff37f0ef63278ddfaa19db88671e921a51ae36dd" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.666548 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.713599 4611 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.713954 4611 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.713966 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.713980 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8bl6\" (UniqueName: \"kubernetes.io/projected/72b8cd6d-9dfa-483d-9634-09df584bf7ed-kube-api-access-f8bl6\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.715062 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.728835 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-config-data" (OuterVolumeSpecName: "config-data") pod "72b8cd6d-9dfa-483d-9634-09df584bf7ed" (UID: "72b8cd6d-9dfa-483d-9634-09df584bf7ed"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.793007 4611 scope.go:117] "RemoveContainer" containerID="0eb6fc18e2acf53303a64c9252161bed2f90f2b3ce2ddf74372d39a82bece954" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.815392 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.815468 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b8cd6d-9dfa-483d-9634-09df584bf7ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.820599 4611 scope.go:117] "RemoveContainer" containerID="6a4291c400687a8859bca93b8773a6865f9de8532f23fe5ceddcf8dad9b2846f" Sep 29 13:32:36 crc kubenswrapper[4611]: I0929 13:32:36.863047 4611 scope.go:117] "RemoveContainer" containerID="bd6551d1fa5c534d534ad3bc12cd9c5c937482943902f3332d4f0b9f8862bdcd" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.023455 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.051329 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.057996 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:32:37 crc kubenswrapper[4611]: E0929 13:32:37.058481 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-notification-agent" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058506 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-notification-agent" Sep 29 13:32:37 crc kubenswrapper[4611]: E0929 13:32:37.058532 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="proxy-httpd" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058542 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="proxy-httpd" Sep 29 13:32:37 crc kubenswrapper[4611]: E0929 13:32:37.058571 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-central-agent" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058580 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-central-agent" Sep 29 13:32:37 crc kubenswrapper[4611]: E0929 13:32:37.058601 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="sg-core" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058609 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="sg-core" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058854 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="sg-core" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058886 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" 
containerName="ceilometer-central-agent" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058901 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="proxy-httpd" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.058924 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" containerName="ceilometer-notification-agent" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.062303 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.068353 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.068590 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.068859 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.084097 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119324 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119389 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-config-data\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119415 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/335e7bb5-075d-48d3-9fa4-3570660b0b28-log-httpd\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119449 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/335e7bb5-075d-48d3-9fa4-3570660b0b28-run-httpd\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119485 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119542 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-scripts\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 
13:32:37.119564 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.119603 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brfcf\" (UniqueName: \"kubernetes.io/projected/335e7bb5-075d-48d3-9fa4-3570660b0b28-kube-api-access-brfcf\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221009 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/335e7bb5-075d-48d3-9fa4-3570660b0b28-run-httpd\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221097 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221138 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-scripts\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221160 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221201 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brfcf\" (UniqueName: \"kubernetes.io/projected/335e7bb5-075d-48d3-9fa4-3570660b0b28-kube-api-access-brfcf\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221334 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221377 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-config-data\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221404 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/335e7bb5-075d-48d3-9fa4-3570660b0b28-log-httpd\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " 
pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221491 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/335e7bb5-075d-48d3-9fa4-3570660b0b28-run-httpd\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.221901 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/335e7bb5-075d-48d3-9fa4-3570660b0b28-log-httpd\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.229609 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.230014 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.230066 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-config-data\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.230179 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.230904 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/335e7bb5-075d-48d3-9fa4-3570660b0b28-scripts\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.239492 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brfcf\" (UniqueName: \"kubernetes.io/projected/335e7bb5-075d-48d3-9fa4-3570660b0b28-kube-api-access-brfcf\") pod \"ceilometer-0\" (UID: \"335e7bb5-075d-48d3-9fa4-3570660b0b28\") " pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.397276 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.750628 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72b8cd6d-9dfa-483d-9634-09df584bf7ed" path="/var/lib/kubelet/pods/72b8cd6d-9dfa-483d-9634-09df584bf7ed/volumes" Sep 29 13:32:37 crc kubenswrapper[4611]: I0929 13:32:37.907987 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 13:32:38 crc kubenswrapper[4611]: I0929 13:32:38.195449 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:32:38 crc kubenswrapper[4611]: I0929 13:32:38.195826 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:32:38 crc kubenswrapper[4611]: I0929 13:32:38.197694 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b7c6854c4-jrwd9" podUID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.6:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.6:8443: connect: connection refused" Sep 29 13:32:38 crc kubenswrapper[4611]: I0929 13:32:38.700574 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"335e7bb5-075d-48d3-9fa4-3570660b0b28","Type":"ContainerStarted","Data":"90c010d66dcb7171fb66cca2c469cac8687da4392c6f5454e2279c3893ffde75"} Sep 29 13:32:39 crc kubenswrapper[4611]: I0929 13:32:39.710585 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"335e7bb5-075d-48d3-9fa4-3570660b0b28","Type":"ContainerStarted","Data":"7d27e430eadc5eb22e3f83f47a9e933d5701c48d3410b65d5974f62330eb3fe7"} Sep 29 13:32:40 crc kubenswrapper[4611]: I0929 13:32:40.722030 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"335e7bb5-075d-48d3-9fa4-3570660b0b28","Type":"ContainerStarted","Data":"ab239c989dc9968bcabeda7928f74438bd8a4699dd7dbd33238e4a65c8a0701f"} Sep 29 13:32:41 crc kubenswrapper[4611]: I0929 13:32:41.753691 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"335e7bb5-075d-48d3-9fa4-3570660b0b28","Type":"ContainerStarted","Data":"3bdf3a4e90af4c23ea4fcb56a22386bccb05660494c129651346c67ca9615b63"} Sep 29 13:32:42 crc kubenswrapper[4611]: I0929 13:32:42.753232 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"335e7bb5-075d-48d3-9fa4-3570660b0b28","Type":"ContainerStarted","Data":"dcd949848ce0e4ebe05fe2b75c0372b8b1182b64699dd3b744c5cce0bdbb1c7b"} Sep 29 13:32:42 crc kubenswrapper[4611]: I0929 13:32:42.753959 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 13:32:42 crc kubenswrapper[4611]: I0929 13:32:42.774481 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.306234109 podStartE2EDuration="5.774466018s" podCreationTimestamp="2025-09-29 13:32:37 +0000 UTC" firstStartedPulling="2025-09-29 13:32:37.927741349 +0000 UTC m=+3144.819260955" lastFinishedPulling="2025-09-29 13:32:42.395973258 +0000 UTC m=+3149.287492864" observedRunningTime="2025-09-29 13:32:42.77177012 +0000 UTC m=+3149.663289726" watchObservedRunningTime="2025-09-29 13:32:42.774466018 +0000 UTC m=+3149.665985624" Sep 29 13:32:45 crc kubenswrapper[4611]: I0929 13:32:45.737530 4611 scope.go:117] 
"RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:32:45 crc kubenswrapper[4611]: E0929 13:32:45.739012 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:32:48 crc kubenswrapper[4611]: I0929 13:32:48.195425 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b7c6854c4-jrwd9" podUID="c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.6:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.6:8443: connect: connection refused" Sep 29 13:32:56 crc kubenswrapper[4611]: I0929 13:32:56.736009 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:32:56 crc kubenswrapper[4611]: E0929 13:32:56.736905 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:33:01 crc kubenswrapper[4611]: I0929 13:33:01.429701 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:33:03 crc kubenswrapper[4611]: I0929 13:33:03.384325 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7b7c6854c4-jrwd9" Sep 29 13:33:03 crc kubenswrapper[4611]: I0929 13:33:03.477264 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f7ffcb4c4-cz4zj"] Sep 29 13:33:03 crc kubenswrapper[4611]: I0929 13:33:03.477502 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon-log" containerID="cri-o://0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426" gracePeriod=30 Sep 29 13:33:03 crc kubenswrapper[4611]: I0929 13:33:03.477617 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" containerID="cri-o://d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9" gracePeriod=30 Sep 29 13:33:04 crc kubenswrapper[4611]: I0929 13:33:04.968572 4611 generic.go:334] "Generic (PLEG): container finished" podID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerID="d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9" exitCode=0 Sep 29 13:33:04 crc kubenswrapper[4611]: I0929 13:33:04.968644 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerDied","Data":"d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9"} Sep 29 13:33:04 crc kubenswrapper[4611]: I0929 13:33:04.969573 4611 scope.go:117] "RemoveContainer" 
containerID="d741cd0ee1243c3bc0d72e207169fa52be409271801e3754c56404673f372a6e" Sep 29 13:33:07 crc kubenswrapper[4611]: I0929 13:33:07.408790 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 13:33:08 crc kubenswrapper[4611]: I0929 13:33:08.562167 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Sep 29 13:33:09 crc kubenswrapper[4611]: I0929 13:33:09.736236 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:33:09 crc kubenswrapper[4611]: E0929 13:33:09.736671 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:33:18 crc kubenswrapper[4611]: I0929 13:33:18.561963 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Sep 29 13:33:23 crc kubenswrapper[4611]: I0929 13:33:23.755870 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:33:23 crc kubenswrapper[4611]: E0929 13:33:23.757287 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:33:28 crc kubenswrapper[4611]: I0929 13:33:28.562223 4611 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6f7ffcb4c4-cz4zj" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Sep 29 13:33:28 crc kubenswrapper[4611]: I0929 13:33:28.562826 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.877778 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892386 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-combined-ca-bundle\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892436 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-scripts\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892527 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66aeb2b0-254f-4c1c-b565-438e9f754366-logs\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892561 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77kgc\" (UniqueName: \"kubernetes.io/projected/66aeb2b0-254f-4c1c-b565-438e9f754366-kube-api-access-77kgc\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892640 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-secret-key\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892667 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-tls-certs\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.892708 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-config-data\") pod \"66aeb2b0-254f-4c1c-b565-438e9f754366\" (UID: \"66aeb2b0-254f-4c1c-b565-438e9f754366\") " Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.896290 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66aeb2b0-254f-4c1c-b565-438e9f754366-logs" (OuterVolumeSpecName: "logs") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.902521 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66aeb2b0-254f-4c1c-b565-438e9f754366-kube-api-access-77kgc" (OuterVolumeSpecName: "kube-api-access-77kgc") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "kube-api-access-77kgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.915525 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.949119 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.953770 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-scripts" (OuterVolumeSpecName: "scripts") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.971909 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-config-data" (OuterVolumeSpecName: "config-data") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.992908 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "66aeb2b0-254f-4c1c-b565-438e9f754366" (UID: "66aeb2b0-254f-4c1c-b565-438e9f754366"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995565 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995599 4611 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995614 4611 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66aeb2b0-254f-4c1c-b565-438e9f754366-logs\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995646 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77kgc\" (UniqueName: \"kubernetes.io/projected/66aeb2b0-254f-4c1c-b565-438e9f754366-kube-api-access-77kgc\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995659 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995668 4611 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/66aeb2b0-254f-4c1c-b565-438e9f754366-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:33 crc kubenswrapper[4611]: I0929 13:33:33.995678 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66aeb2b0-254f-4c1c-b565-438e9f754366-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.257134 4611 generic.go:334] "Generic (PLEG): container finished" podID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerID="0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426" exitCode=137 Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.257266 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerDied","Data":"0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426"} Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.257342 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7ffcb4c4-cz4zj" event={"ID":"66aeb2b0-254f-4c1c-b565-438e9f754366","Type":"ContainerDied","Data":"a966c0a2ba0101bad71be7df8afba6226c8b21f4e8f586d1f1905d26e41f3ac2"} Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.257377 4611 scope.go:117] "RemoveContainer" containerID="d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.257783 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f7ffcb4c4-cz4zj" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.310239 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f7ffcb4c4-cz4zj"] Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.319297 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6f7ffcb4c4-cz4zj"] Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.521195 4611 scope.go:117] "RemoveContainer" containerID="0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.581457 4611 scope.go:117] "RemoveContainer" containerID="d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9" Sep 29 13:33:34 crc kubenswrapper[4611]: E0929 13:33:34.582002 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9\": container with ID starting with d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9 not found: ID does not exist" containerID="d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.582075 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9"} err="failed to get container status \"d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9\": rpc error: code = NotFound desc = could not find container \"d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9\": container with ID starting with d4d673714fd4a4318313a0cd3b1ddc3d2006d1f89ab76ed1efc8769a8210fba9 not found: ID does not exist" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.582115 4611 scope.go:117] "RemoveContainer" containerID="0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426" Sep 29 13:33:34 crc kubenswrapper[4611]: E0929 13:33:34.582893 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426\": container with ID starting with 0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426 not found: ID does not exist" containerID="0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426" Sep 29 13:33:34 crc kubenswrapper[4611]: I0929 13:33:34.582927 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426"} err="failed to get container status \"0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426\": rpc error: code = NotFound desc = could not find container \"0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426\": container with ID starting with 0c2526d73c40bff0e52bf993ae32ed105d8e0b1dd6874b25a983ef65d997e426 not found: ID does not exist" Sep 29 13:33:35 crc kubenswrapper[4611]: I0929 13:33:35.749093 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" path="/var/lib/kubelet/pods/66aeb2b0-254f-4c1c-b565-438e9f754366/volumes" Sep 29 13:33:38 crc kubenswrapper[4611]: I0929 13:33:38.736580 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:33:38 crc kubenswrapper[4611]: E0929 13:33:38.737196 4611 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:33:50 crc kubenswrapper[4611]: I0929 13:33:50.737140 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:33:50 crc kubenswrapper[4611]: E0929 13:33:50.737960 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:34:01 crc kubenswrapper[4611]: I0929 13:34:01.737127 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:34:01 crc kubenswrapper[4611]: E0929 13:34:01.738485 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.815316 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 13:34:05 crc kubenswrapper[4611]: E0929 13:34:05.816190 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.816204 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" Sep 29 13:34:05 crc kubenswrapper[4611]: E0929 13:34:05.816226 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.816233 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" Sep 29 13:34:05 crc kubenswrapper[4611]: E0929 13:34:05.816269 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon-log" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.816275 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon-log" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.816462 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.816480 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon-log" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.817193 4611 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.823106 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.823496 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-92hc4" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.823812 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.824076 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941208 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941288 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941325 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941376 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941438 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941499 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941542 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdtlz\" (UniqueName: \"kubernetes.io/projected/2d140fa2-fe3d-4e16-810f-c9b568c4554c-kube-api-access-fdtlz\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " 
pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941569 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.941616 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-config-data\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:05 crc kubenswrapper[4611]: I0929 13:34:05.952659 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043029 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043099 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043171 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043241 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043266 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdtlz\" (UniqueName: \"kubernetes.io/projected/2d140fa2-fe3d-4e16-810f-c9b568c4554c-kube-api-access-fdtlz\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043297 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043327 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-config-data\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043389 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043420 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.043867 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.044073 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.044727 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.044728 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.045523 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-config-data\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.054463 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.055091 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " 
pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.058129 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.065440 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdtlz\" (UniqueName: \"kubernetes.io/projected/2d140fa2-fe3d-4e16-810f-c9b568c4554c-kube-api-access-fdtlz\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.079644 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.144220 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 13:34:06 crc kubenswrapper[4611]: I0929 13:34:06.626918 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 13:34:07 crc kubenswrapper[4611]: I0929 13:34:07.587428 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2d140fa2-fe3d-4e16-810f-c9b568c4554c","Type":"ContainerStarted","Data":"c565ad2a16f30253fde843ada4e07a46613ae8786818f16462a1876e21e4d558"} Sep 29 13:34:16 crc kubenswrapper[4611]: I0929 13:34:16.736087 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:34:16 crc kubenswrapper[4611]: E0929 13:34:16.736974 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:34:19 crc kubenswrapper[4611]: I0929 13:34:19.130813 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 13:34:22 crc kubenswrapper[4611]: I0929 13:34:22.745788 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2d140fa2-fe3d-4e16-810f-c9b568c4554c","Type":"ContainerStarted","Data":"6614575c2c06c72c2240e272f83578a70ae0d822327ebe3564783586c69d9989"} Sep 29 13:34:22 crc kubenswrapper[4611]: I0929 13:34:22.765315 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=6.25811349 podStartE2EDuration="18.765297385s" podCreationTimestamp="2025-09-29 13:34:04 +0000 UTC" firstStartedPulling="2025-09-29 13:34:06.61951112 +0000 UTC m=+3233.511030726" lastFinishedPulling="2025-09-29 13:34:19.126695025 +0000 UTC m=+3246.018214621" observedRunningTime="2025-09-29 13:34:22.76167512 +0000 UTC m=+3249.653194736" watchObservedRunningTime="2025-09-29 13:34:22.765297385 +0000 UTC m=+3249.656816991" Sep 29 13:34:28 crc 
kubenswrapper[4611]: I0929 13:34:28.736208 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:34:28 crc kubenswrapper[4611]: E0929 13:34:28.736754 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:34:39 crc kubenswrapper[4611]: I0929 13:34:39.736432 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:34:39 crc kubenswrapper[4611]: E0929 13:34:39.737691 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:34:53 crc kubenswrapper[4611]: I0929 13:34:53.749667 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:34:53 crc kubenswrapper[4611]: E0929 13:34:53.750618 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:35:05 crc kubenswrapper[4611]: I0929 13:35:05.736513 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:35:05 crc kubenswrapper[4611]: E0929 13:35:05.738696 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:35:17 crc kubenswrapper[4611]: I0929 13:35:17.736725 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:35:17 crc kubenswrapper[4611]: E0929 13:35:17.737382 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.264044 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7xj98"] Sep 29 13:35:21 crc kubenswrapper[4611]: 
I0929 13:35:21.265578 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="66aeb2b0-254f-4c1c-b565-438e9f754366" containerName="horizon" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.268184 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.288283 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7xj98"] Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.306826 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnsnz\" (UniqueName: \"kubernetes.io/projected/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-kube-api-access-mnsnz\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.309702 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-catalog-content\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.309907 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-utilities\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.412046 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-utilities\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.412104 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnsnz\" (UniqueName: \"kubernetes.io/projected/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-kube-api-access-mnsnz\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.412210 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-catalog-content\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.412547 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-utilities\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.412584 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-catalog-content\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.444327 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnsnz\" (UniqueName: \"kubernetes.io/projected/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-kube-api-access-mnsnz\") pod \"community-operators-7xj98\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:21 crc kubenswrapper[4611]: I0929 13:35:21.593906 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:22 crc kubenswrapper[4611]: I0929 13:35:22.394604 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7xj98"] Sep 29 13:35:23 crc kubenswrapper[4611]: I0929 13:35:23.342443 4611 generic.go:334] "Generic (PLEG): container finished" podID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerID="f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85" exitCode=0 Sep 29 13:35:23 crc kubenswrapper[4611]: I0929 13:35:23.342562 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerDied","Data":"f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85"} Sep 29 13:35:23 crc kubenswrapper[4611]: I0929 13:35:23.342781 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerStarted","Data":"afc124a0eafad2aa063aa8ffde09cb0994fba401717258ab2360b4d80d3baa7c"} Sep 29 13:35:25 crc kubenswrapper[4611]: I0929 13:35:25.363353 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerStarted","Data":"b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c"} Sep 29 13:35:28 crc kubenswrapper[4611]: I0929 13:35:28.395733 4611 generic.go:334] "Generic (PLEG): container finished" podID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerID="b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c" exitCode=0 Sep 29 13:35:28 crc kubenswrapper[4611]: I0929 13:35:28.395812 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerDied","Data":"b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c"} Sep 29 13:35:28 crc kubenswrapper[4611]: I0929 13:35:28.398955 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:35:29 crc kubenswrapper[4611]: I0929 13:35:29.408976 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerStarted","Data":"a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024"} Sep 29 13:35:29 crc kubenswrapper[4611]: I0929 13:35:29.454526 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7xj98" podStartSLOduration=2.921089263 podStartE2EDuration="8.454507202s" 
podCreationTimestamp="2025-09-29 13:35:21 +0000 UTC" firstStartedPulling="2025-09-29 13:35:23.344790178 +0000 UTC m=+3310.236309794" lastFinishedPulling="2025-09-29 13:35:28.878208127 +0000 UTC m=+3315.769727733" observedRunningTime="2025-09-29 13:35:29.438921301 +0000 UTC m=+3316.330440927" watchObservedRunningTime="2025-09-29 13:35:29.454507202 +0000 UTC m=+3316.346026808" Sep 29 13:35:31 crc kubenswrapper[4611]: I0929 13:35:31.594280 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:31 crc kubenswrapper[4611]: I0929 13:35:31.594936 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:31 crc kubenswrapper[4611]: I0929 13:35:31.659985 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:32 crc kubenswrapper[4611]: I0929 13:35:32.736870 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:35:32 crc kubenswrapper[4611]: E0929 13:35:32.737429 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:35:41 crc kubenswrapper[4611]: I0929 13:35:41.642789 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:41 crc kubenswrapper[4611]: I0929 13:35:41.706495 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7xj98"] Sep 29 13:35:42 crc kubenswrapper[4611]: I0929 13:35:42.534264 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7xj98" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="registry-server" containerID="cri-o://a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024" gracePeriod=2 Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.017910 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.163382 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-catalog-content\") pod \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.163460 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnsnz\" (UniqueName: \"kubernetes.io/projected/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-kube-api-access-mnsnz\") pod \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.163602 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-utilities\") pod \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\" (UID: \"6164cdbc-cf24-438a-a8b3-70389ef4f5d2\") " Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.164385 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-utilities" (OuterVolumeSpecName: "utilities") pod "6164cdbc-cf24-438a-a8b3-70389ef4f5d2" (UID: "6164cdbc-cf24-438a-a8b3-70389ef4f5d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.173880 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-kube-api-access-mnsnz" (OuterVolumeSpecName: "kube-api-access-mnsnz") pod "6164cdbc-cf24-438a-a8b3-70389ef4f5d2" (UID: "6164cdbc-cf24-438a-a8b3-70389ef4f5d2"). InnerVolumeSpecName "kube-api-access-mnsnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.219942 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6164cdbc-cf24-438a-a8b3-70389ef4f5d2" (UID: "6164cdbc-cf24-438a-a8b3-70389ef4f5d2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.265972 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.266244 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnsnz\" (UniqueName: \"kubernetes.io/projected/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-kube-api-access-mnsnz\") on node \"crc\" DevicePath \"\"" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.266356 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6164cdbc-cf24-438a-a8b3-70389ef4f5d2-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.550312 4611 generic.go:334] "Generic (PLEG): container finished" podID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerID="a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024" exitCode=0 Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.550391 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7xj98" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.550391 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerDied","Data":"a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024"} Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.550927 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xj98" event={"ID":"6164cdbc-cf24-438a-a8b3-70389ef4f5d2","Type":"ContainerDied","Data":"afc124a0eafad2aa063aa8ffde09cb0994fba401717258ab2360b4d80d3baa7c"} Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.550986 4611 scope.go:117] "RemoveContainer" containerID="a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.580960 4611 scope.go:117] "RemoveContainer" containerID="b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.596223 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7xj98"] Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.605498 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7xj98"] Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.612899 4611 scope.go:117] "RemoveContainer" containerID="f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.677082 4611 scope.go:117] "RemoveContainer" containerID="a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024" Sep 29 13:35:43 crc kubenswrapper[4611]: E0929 13:35:43.677738 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024\": container with ID starting with a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024 not found: ID does not exist" containerID="a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.679581 
4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024"} err="failed to get container status \"a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024\": rpc error: code = NotFound desc = could not find container \"a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024\": container with ID starting with a10dcbef89893bce24ba04f7036cd1a4c2f2ec7fbf261198b180c4cef253d024 not found: ID does not exist" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.679827 4611 scope.go:117] "RemoveContainer" containerID="b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c" Sep 29 13:35:43 crc kubenswrapper[4611]: E0929 13:35:43.680439 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c\": container with ID starting with b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c not found: ID does not exist" containerID="b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.680496 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c"} err="failed to get container status \"b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c\": rpc error: code = NotFound desc = could not find container \"b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c\": container with ID starting with b22f4a18f504352b1449d5f811b2302bbf304da287a564978510383cef770b7c not found: ID does not exist" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.680532 4611 scope.go:117] "RemoveContainer" containerID="f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85" Sep 29 13:35:43 crc kubenswrapper[4611]: E0929 13:35:43.681069 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85\": container with ID starting with f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85 not found: ID does not exist" containerID="f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.681105 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85"} err="failed to get container status \"f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85\": rpc error: code = NotFound desc = could not find container \"f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85\": container with ID starting with f760909463c697b5d11ecf4a10bddf4f9cd54ff6be0fee096685cabec6713c85 not found: ID does not exist" Sep 29 13:35:43 crc kubenswrapper[4611]: I0929 13:35:43.750900 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" path="/var/lib/kubelet/pods/6164cdbc-cf24-438a-a8b3-70389ef4f5d2/volumes" Sep 29 13:35:44 crc kubenswrapper[4611]: I0929 13:35:44.736712 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:35:45 crc kubenswrapper[4611]: I0929 13:35:45.573750 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"c1c60b189458280b5e8e48ba6c5da7b7ab1db6e743825298ec4421b91f8d9f9d"} Sep 29 13:36:37 crc kubenswrapper[4611]: I0929 13:36:37.434784 4611 scope.go:117] "RemoveContainer" containerID="f9ea27458ba186acb8854d21d39163af0f4f71962a3759051f59b5ea26e425f0" Sep 29 13:36:37 crc kubenswrapper[4611]: I0929 13:36:37.467709 4611 scope.go:117] "RemoveContainer" containerID="c3a7a750342dd761b9e7ded05766349223a661afc800e0beffb652d24e0607ac" Sep 29 13:36:37 crc kubenswrapper[4611]: I0929 13:36:37.490807 4611 scope.go:117] "RemoveContainer" containerID="31e00aac422c6e998d6fc71162972b363c360c16fb14e37f8125e453d3526ac4" Sep 29 13:38:04 crc kubenswrapper[4611]: I0929 13:38:04.628669 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:38:04 crc kubenswrapper[4611]: I0929 13:38:04.629338 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:38:34 crc kubenswrapper[4611]: I0929 13:38:34.628275 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:38:34 crc kubenswrapper[4611]: I0929 13:38:34.629117 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:39:04 crc kubenswrapper[4611]: I0929 13:39:04.628579 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:39:04 crc kubenswrapper[4611]: I0929 13:39:04.629266 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:39:04 crc kubenswrapper[4611]: I0929 13:39:04.629329 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:39:04 crc kubenswrapper[4611]: I0929 13:39:04.630254 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c1c60b189458280b5e8e48ba6c5da7b7ab1db6e743825298ec4421b91f8d9f9d"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:39:04 crc kubenswrapper[4611]: I0929 13:39:04.630326 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://c1c60b189458280b5e8e48ba6c5da7b7ab1db6e743825298ec4421b91f8d9f9d" gracePeriod=600 Sep 29 13:39:05 crc kubenswrapper[4611]: I0929 13:39:05.740149 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="c1c60b189458280b5e8e48ba6c5da7b7ab1db6e743825298ec4421b91f8d9f9d" exitCode=0 Sep 29 13:39:05 crc kubenswrapper[4611]: I0929 13:39:05.758075 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"c1c60b189458280b5e8e48ba6c5da7b7ab1db6e743825298ec4421b91f8d9f9d"} Sep 29 13:39:05 crc kubenswrapper[4611]: I0929 13:39:05.758162 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"} Sep 29 13:39:05 crc kubenswrapper[4611]: I0929 13:39:05.758206 4611 scope.go:117] "RemoveContainer" containerID="82e3ab54cc5edc2be6ce67df0cb9785d1778ebb12813f37e13920184b0f7b18c" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.583607 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r78d4"] Sep 29 13:40:04 crc kubenswrapper[4611]: E0929 13:40:04.587070 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="extract-content" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.587115 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="extract-content" Sep 29 13:40:04 crc kubenswrapper[4611]: E0929 13:40:04.587148 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="registry-server" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.587155 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="registry-server" Sep 29 13:40:04 crc kubenswrapper[4611]: E0929 13:40:04.587173 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="extract-utilities" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.587182 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="extract-utilities" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.587455 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="6164cdbc-cf24-438a-a8b3-70389ef4f5d2" containerName="registry-server" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.589101 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.635442 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r78d4"] Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.668981 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-utilities\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.669058 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-catalog-content\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.669255 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6rj5\" (UniqueName: \"kubernetes.io/projected/3e3cd927-937d-4024-b3f5-3a2080fbef11-kube-api-access-r6rj5\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.771829 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-utilities\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.771989 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-catalog-content\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.772078 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6rj5\" (UniqueName: \"kubernetes.io/projected/3e3cd927-937d-4024-b3f5-3a2080fbef11-kube-api-access-r6rj5\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.772410 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-utilities\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.772755 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-catalog-content\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.797492 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r6rj5\" (UniqueName: \"kubernetes.io/projected/3e3cd927-937d-4024-b3f5-3a2080fbef11-kube-api-access-r6rj5\") pod \"redhat-operators-r78d4\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:04 crc kubenswrapper[4611]: I0929 13:40:04.910745 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:05 crc kubenswrapper[4611]: I0929 13:40:05.445263 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r78d4"] Sep 29 13:40:06 crc kubenswrapper[4611]: I0929 13:40:06.316663 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerDied","Data":"1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5"} Sep 29 13:40:06 crc kubenswrapper[4611]: I0929 13:40:06.316607 4611 generic.go:334] "Generic (PLEG): container finished" podID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerID="1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5" exitCode=0 Sep 29 13:40:06 crc kubenswrapper[4611]: I0929 13:40:06.316964 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerStarted","Data":"47dd6a817d16fc4084bc71d23e50aeb986fd291174e2bc053871f03270cb594a"} Sep 29 13:40:08 crc kubenswrapper[4611]: I0929 13:40:08.334999 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerStarted","Data":"ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00"} Sep 29 13:40:11 crc kubenswrapper[4611]: I0929 13:40:11.361934 4611 generic.go:334] "Generic (PLEG): container finished" podID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerID="ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00" exitCode=0 Sep 29 13:40:11 crc kubenswrapper[4611]: I0929 13:40:11.362006 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerDied","Data":"ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00"} Sep 29 13:40:12 crc kubenswrapper[4611]: I0929 13:40:12.379637 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerStarted","Data":"e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed"} Sep 29 13:40:12 crc kubenswrapper[4611]: I0929 13:40:12.409448 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r78d4" podStartSLOduration=2.900481865 podStartE2EDuration="8.409428016s" podCreationTimestamp="2025-09-29 13:40:04 +0000 UTC" firstStartedPulling="2025-09-29 13:40:06.320419881 +0000 UTC m=+3593.211939487" lastFinishedPulling="2025-09-29 13:40:11.829366032 +0000 UTC m=+3598.720885638" observedRunningTime="2025-09-29 13:40:12.394488474 +0000 UTC m=+3599.286008090" watchObservedRunningTime="2025-09-29 13:40:12.409428016 +0000 UTC m=+3599.300947622" Sep 29 13:40:14 crc kubenswrapper[4611]: I0929 13:40:14.911601 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 
13:40:14 crc kubenswrapper[4611]: I0929 13:40:14.913649 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:15 crc kubenswrapper[4611]: I0929 13:40:15.966742 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r78d4" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="registry-server" probeResult="failure" output=< Sep 29 13:40:15 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:40:15 crc kubenswrapper[4611]: > Sep 29 13:40:25 crc kubenswrapper[4611]: I0929 13:40:25.958199 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r78d4" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="registry-server" probeResult="failure" output=< Sep 29 13:40:25 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:40:25 crc kubenswrapper[4611]: > Sep 29 13:40:34 crc kubenswrapper[4611]: I0929 13:40:34.961748 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:35 crc kubenswrapper[4611]: I0929 13:40:35.016466 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:35 crc kubenswrapper[4611]: I0929 13:40:35.777412 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r78d4"] Sep 29 13:40:36 crc kubenswrapper[4611]: I0929 13:40:36.620698 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r78d4" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="registry-server" containerID="cri-o://e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed" gracePeriod=2 Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.378983 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.494001 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6rj5\" (UniqueName: \"kubernetes.io/projected/3e3cd927-937d-4024-b3f5-3a2080fbef11-kube-api-access-r6rj5\") pod \"3e3cd927-937d-4024-b3f5-3a2080fbef11\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.494087 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-utilities\") pod \"3e3cd927-937d-4024-b3f5-3a2080fbef11\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.494218 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-catalog-content\") pod \"3e3cd927-937d-4024-b3f5-3a2080fbef11\" (UID: \"3e3cd927-937d-4024-b3f5-3a2080fbef11\") " Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.494819 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-utilities" (OuterVolumeSpecName: "utilities") pod "3e3cd927-937d-4024-b3f5-3a2080fbef11" (UID: "3e3cd927-937d-4024-b3f5-3a2080fbef11"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.501748 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e3cd927-937d-4024-b3f5-3a2080fbef11-kube-api-access-r6rj5" (OuterVolumeSpecName: "kube-api-access-r6rj5") pod "3e3cd927-937d-4024-b3f5-3a2080fbef11" (UID: "3e3cd927-937d-4024-b3f5-3a2080fbef11"). InnerVolumeSpecName "kube-api-access-r6rj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.596844 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6rj5\" (UniqueName: \"kubernetes.io/projected/3e3cd927-937d-4024-b3f5-3a2080fbef11-kube-api-access-r6rj5\") on node \"crc\" DevicePath \"\"" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.596888 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.597776 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e3cd927-937d-4024-b3f5-3a2080fbef11" (UID: "3e3cd927-937d-4024-b3f5-3a2080fbef11"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.632596 4611 generic.go:334] "Generic (PLEG): container finished" podID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerID="e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed" exitCode=0 Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.632663 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerDied","Data":"e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed"} Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.632704 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r78d4" event={"ID":"3e3cd927-937d-4024-b3f5-3a2080fbef11","Type":"ContainerDied","Data":"47dd6a817d16fc4084bc71d23e50aeb986fd291174e2bc053871f03270cb594a"} Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.632725 4611 scope.go:117] "RemoveContainer" containerID="e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.632673 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r78d4" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.655064 4611 scope.go:117] "RemoveContainer" containerID="ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.680883 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r78d4"] Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.691768 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r78d4"] Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.698268 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e3cd927-937d-4024-b3f5-3a2080fbef11-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.702886 4611 scope.go:117] "RemoveContainer" containerID="1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.733955 4611 scope.go:117] "RemoveContainer" containerID="e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed" Sep 29 13:40:37 crc kubenswrapper[4611]: E0929 13:40:37.736979 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed\": container with ID starting with e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed not found: ID does not exist" containerID="e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.737024 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed"} err="failed to get container status \"e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed\": rpc error: code = NotFound desc = could not find container \"e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed\": container with ID starting with e74ef1a12074ebdc592efe85a92b4652ae5c708e54b86facf8c4ee5d509305ed not found: ID does not exist" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.737051 4611 scope.go:117] "RemoveContainer" containerID="ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00" Sep 29 13:40:37 crc kubenswrapper[4611]: E0929 13:40:37.738249 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00\": container with ID starting with ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00 not found: ID does not exist" containerID="ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.738269 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00"} err="failed to get container status \"ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00\": rpc error: code = NotFound desc = could not find container \"ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00\": container with ID starting with ecba01db9955f564520c3d8b66809e9bf96fff46e8ecdb59a4f433ff4e522e00 not found: ID does not exist" Sep 29 13:40:37 crc 
kubenswrapper[4611]: I0929 13:40:37.738283 4611 scope.go:117] "RemoveContainer" containerID="1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5" Sep 29 13:40:37 crc kubenswrapper[4611]: E0929 13:40:37.739055 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5\": container with ID starting with 1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5 not found: ID does not exist" containerID="1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.739074 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5"} err="failed to get container status \"1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5\": rpc error: code = NotFound desc = could not find container \"1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5\": container with ID starting with 1b04e94c688e5b82ccf265adf71fa79f705acd69bcc4e281daacb91fbcc39db5 not found: ID does not exist" Sep 29 13:40:37 crc kubenswrapper[4611]: I0929 13:40:37.751489 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" path="/var/lib/kubelet/pods/3e3cd927-937d-4024-b3f5-3a2080fbef11/volumes" Sep 29 13:41:04 crc kubenswrapper[4611]: I0929 13:41:04.629173 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:41:04 crc kubenswrapper[4611]: I0929 13:41:04.629848 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:41:26 crc kubenswrapper[4611]: I0929 13:41:26.055572 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-r2rqs"] Sep 29 13:41:26 crc kubenswrapper[4611]: I0929 13:41:26.069287 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-r2rqs"] Sep 29 13:41:27 crc kubenswrapper[4611]: I0929 13:41:27.752484 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64f5bc16-a580-40ef-9e8a-506fc4714c2b" path="/var/lib/kubelet/pods/64f5bc16-a580-40ef-9e8a-506fc4714c2b/volumes" Sep 29 13:41:34 crc kubenswrapper[4611]: I0929 13:41:34.628922 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:41:34 crc kubenswrapper[4611]: I0929 13:41:34.629950 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:41:36 crc 
kubenswrapper[4611]: I0929 13:41:36.040650 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-cc47-account-create-bqg97"] Sep 29 13:41:36 crc kubenswrapper[4611]: I0929 13:41:36.051833 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-cc47-account-create-bqg97"] Sep 29 13:41:37 crc kubenswrapper[4611]: I0929 13:41:37.760417 4611 scope.go:117] "RemoveContainer" containerID="27ce1890bb10c0b4cb526ea94c45495f92f3b1d7d232bf8fa5bf31c18dbedc8a" Sep 29 13:41:37 crc kubenswrapper[4611]: I0929 13:41:37.768032 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc37efcb-2804-4a6f-a41e-82cdc2c608b9" path="/var/lib/kubelet/pods/dc37efcb-2804-4a6f-a41e-82cdc2c608b9/volumes" Sep 29 13:41:37 crc kubenswrapper[4611]: I0929 13:41:37.791247 4611 scope.go:117] "RemoveContainer" containerID="141d3f8a936186d819ca527dfc3ef9a957ba86c9bac4450d8b1c282ee893de8d" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.285942 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5lxwk"] Sep 29 13:41:58 crc kubenswrapper[4611]: E0929 13:41:58.286944 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="extract-content" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.286962 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="extract-content" Sep 29 13:41:58 crc kubenswrapper[4611]: E0929 13:41:58.286991 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="extract-utilities" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.286999 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="extract-utilities" Sep 29 13:41:58 crc kubenswrapper[4611]: E0929 13:41:58.287048 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="registry-server" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.287054 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="registry-server" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.287299 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e3cd927-937d-4024-b3f5-3a2080fbef11" containerName="registry-server" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.288812 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.316292 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5lxwk"] Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.406190 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-catalog-content\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.406250 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-utilities\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.406387 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qg5f\" (UniqueName: \"kubernetes.io/projected/04a1274c-3727-48c2-b5a6-15e179d71cb6-kube-api-access-5qg5f\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.508329 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qg5f\" (UniqueName: \"kubernetes.io/projected/04a1274c-3727-48c2-b5a6-15e179d71cb6-kube-api-access-5qg5f\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.508800 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-catalog-content\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.508852 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-utilities\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.509405 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-utilities\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.509526 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-catalog-content\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.528734 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5qg5f\" (UniqueName: \"kubernetes.io/projected/04a1274c-3727-48c2-b5a6-15e179d71cb6-kube-api-access-5qg5f\") pod \"certified-operators-5lxwk\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:58 crc kubenswrapper[4611]: I0929 13:41:58.609444 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:41:59 crc kubenswrapper[4611]: I0929 13:41:59.256729 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5lxwk"] Sep 29 13:41:59 crc kubenswrapper[4611]: I0929 13:41:59.427902 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerStarted","Data":"123be27f29efed0b4a0d54d46497af198dfabdb638b3ac2d1099526f503ddfd4"} Sep 29 13:42:00 crc kubenswrapper[4611]: I0929 13:42:00.441018 4611 generic.go:334] "Generic (PLEG): container finished" podID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerID="5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb" exitCode=0 Sep 29 13:42:00 crc kubenswrapper[4611]: I0929 13:42:00.441087 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerDied","Data":"5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb"} Sep 29 13:42:00 crc kubenswrapper[4611]: I0929 13:42:00.445938 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:42:02 crc kubenswrapper[4611]: I0929 13:42:02.463657 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerStarted","Data":"fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb"} Sep 29 13:42:03 crc kubenswrapper[4611]: I0929 13:42:03.474086 4611 generic.go:334] "Generic (PLEG): container finished" podID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerID="fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb" exitCode=0 Sep 29 13:42:03 crc kubenswrapper[4611]: I0929 13:42:03.474329 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerDied","Data":"fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb"} Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.500242 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerStarted","Data":"0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9"} Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.529972 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5lxwk" podStartSLOduration=3.014883715 podStartE2EDuration="6.529949159s" podCreationTimestamp="2025-09-29 13:41:58 +0000 UTC" firstStartedPulling="2025-09-29 13:42:00.443679787 +0000 UTC m=+3707.335199393" lastFinishedPulling="2025-09-29 13:42:03.958745231 +0000 UTC m=+3710.850264837" observedRunningTime="2025-09-29 13:42:04.522202745 +0000 UTC m=+3711.413722371" watchObservedRunningTime="2025-09-29 
13:42:04.529949159 +0000 UTC m=+3711.421468765" Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.628748 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.628816 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.628866 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.629679 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:42:04 crc kubenswrapper[4611]: I0929 13:42:04.629750 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" gracePeriod=600 Sep 29 13:42:04 crc kubenswrapper[4611]: E0929 13:42:04.758432 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.265207 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-slvgn"] Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.267853 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.284289 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-slvgn"] Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.357833 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-utilities\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.357876 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfdxp\" (UniqueName: \"kubernetes.io/projected/bc0c9851-8404-41a4-ae6f-893c5668e818-kube-api-access-zfdxp\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.357969 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-catalog-content\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.460498 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-utilities\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.460554 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfdxp\" (UniqueName: \"kubernetes.io/projected/bc0c9851-8404-41a4-ae6f-893c5668e818-kube-api-access-zfdxp\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.460666 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-catalog-content\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.461052 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-utilities\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.461157 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-catalog-content\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.486360 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zfdxp\" (UniqueName: \"kubernetes.io/projected/bc0c9851-8404-41a4-ae6f-893c5668e818-kube-api-access-zfdxp\") pod \"redhat-marketplace-slvgn\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.511248 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" exitCode=0 Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.511325 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"} Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.511377 4611 scope.go:117] "RemoveContainer" containerID="c1c60b189458280b5e8e48ba6c5da7b7ab1db6e743825298ec4421b91f8d9f9d" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.512023 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:42:05 crc kubenswrapper[4611]: E0929 13:42:05.512288 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:42:05 crc kubenswrapper[4611]: I0929 13:42:05.583861 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:06 crc kubenswrapper[4611]: I0929 13:42:06.045913 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-x2b9f"] Sep 29 13:42:06 crc kubenswrapper[4611]: I0929 13:42:06.056793 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-x2b9f"] Sep 29 13:42:06 crc kubenswrapper[4611]: I0929 13:42:06.179117 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-slvgn"] Sep 29 13:42:06 crc kubenswrapper[4611]: W0929 13:42:06.182803 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc0c9851_8404_41a4_ae6f_893c5668e818.slice/crio-fbffdd54793434578ea5d2661a6471f953ba8c995b16db1cdb8ba06997fd9f81 WatchSource:0}: Error finding container fbffdd54793434578ea5d2661a6471f953ba8c995b16db1cdb8ba06997fd9f81: Status 404 returned error can't find the container with id fbffdd54793434578ea5d2661a6471f953ba8c995b16db1cdb8ba06997fd9f81 Sep 29 13:42:06 crc kubenswrapper[4611]: I0929 13:42:06.524730 4611 generic.go:334] "Generic (PLEG): container finished" podID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerID="87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef" exitCode=0 Sep 29 13:42:06 crc kubenswrapper[4611]: I0929 13:42:06.524836 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerDied","Data":"87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef"} Sep 29 13:42:06 crc kubenswrapper[4611]: I0929 13:42:06.525117 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerStarted","Data":"fbffdd54793434578ea5d2661a6471f953ba8c995b16db1cdb8ba06997fd9f81"} Sep 29 13:42:07 crc kubenswrapper[4611]: I0929 13:42:07.535211 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerStarted","Data":"50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99"} Sep 29 13:42:07 crc kubenswrapper[4611]: I0929 13:42:07.770603 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="707d4443-620a-49ae-98b1-c45b03681060" path="/var/lib/kubelet/pods/707d4443-620a-49ae-98b1-c45b03681060/volumes" Sep 29 13:42:08 crc kubenswrapper[4611]: I0929 13:42:08.545400 4611 generic.go:334] "Generic (PLEG): container finished" podID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerID="50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99" exitCode=0 Sep 29 13:42:08 crc kubenswrapper[4611]: I0929 13:42:08.545492 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerDied","Data":"50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99"} Sep 29 13:42:08 crc kubenswrapper[4611]: I0929 13:42:08.610604 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:42:08 crc kubenswrapper[4611]: I0929 13:42:08.610812 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:42:08 crc 
kubenswrapper[4611]: I0929 13:42:08.665344 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:42:09 crc kubenswrapper[4611]: I0929 13:42:09.561093 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerStarted","Data":"1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f"} Sep 29 13:42:09 crc kubenswrapper[4611]: I0929 13:42:09.587964 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-slvgn" podStartSLOduration=2.01747765 podStartE2EDuration="4.58794196s" podCreationTimestamp="2025-09-29 13:42:05 +0000 UTC" firstStartedPulling="2025-09-29 13:42:06.526849823 +0000 UTC m=+3713.418369429" lastFinishedPulling="2025-09-29 13:42:09.097314133 +0000 UTC m=+3715.988833739" observedRunningTime="2025-09-29 13:42:09.580093053 +0000 UTC m=+3716.471612689" watchObservedRunningTime="2025-09-29 13:42:09.58794196 +0000 UTC m=+3716.479461586" Sep 29 13:42:09 crc kubenswrapper[4611]: I0929 13:42:09.611553 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:42:11 crc kubenswrapper[4611]: I0929 13:42:11.057136 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5lxwk"] Sep 29 13:42:11 crc kubenswrapper[4611]: I0929 13:42:11.578981 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5lxwk" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="registry-server" containerID="cri-o://0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9" gracePeriod=2 Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.077205 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.197607 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-catalog-content\") pod \"04a1274c-3727-48c2-b5a6-15e179d71cb6\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.197902 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qg5f\" (UniqueName: \"kubernetes.io/projected/04a1274c-3727-48c2-b5a6-15e179d71cb6-kube-api-access-5qg5f\") pod \"04a1274c-3727-48c2-b5a6-15e179d71cb6\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.197947 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-utilities\") pod \"04a1274c-3727-48c2-b5a6-15e179d71cb6\" (UID: \"04a1274c-3727-48c2-b5a6-15e179d71cb6\") " Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.199936 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-utilities" (OuterVolumeSpecName: "utilities") pod "04a1274c-3727-48c2-b5a6-15e179d71cb6" (UID: "04a1274c-3727-48c2-b5a6-15e179d71cb6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.223059 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04a1274c-3727-48c2-b5a6-15e179d71cb6-kube-api-access-5qg5f" (OuterVolumeSpecName: "kube-api-access-5qg5f") pod "04a1274c-3727-48c2-b5a6-15e179d71cb6" (UID: "04a1274c-3727-48c2-b5a6-15e179d71cb6"). InnerVolumeSpecName "kube-api-access-5qg5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.246135 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04a1274c-3727-48c2-b5a6-15e179d71cb6" (UID: "04a1274c-3727-48c2-b5a6-15e179d71cb6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.301999 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qg5f\" (UniqueName: \"kubernetes.io/projected/04a1274c-3727-48c2-b5a6-15e179d71cb6-kube-api-access-5qg5f\") on node \"crc\" DevicePath \"\"" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.302059 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.302106 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04a1274c-3727-48c2-b5a6-15e179d71cb6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.589196 4611 generic.go:334] "Generic (PLEG): container finished" podID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerID="0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9" exitCode=0 Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.589244 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerDied","Data":"0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9"} Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.589291 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lxwk" event={"ID":"04a1274c-3727-48c2-b5a6-15e179d71cb6","Type":"ContainerDied","Data":"123be27f29efed0b4a0d54d46497af198dfabdb638b3ac2d1099526f503ddfd4"} Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.589314 4611 scope.go:117] "RemoveContainer" containerID="0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.589311 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5lxwk" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.617216 4611 scope.go:117] "RemoveContainer" containerID="fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.628116 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5lxwk"] Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.640043 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5lxwk"] Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.651766 4611 scope.go:117] "RemoveContainer" containerID="5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.687825 4611 scope.go:117] "RemoveContainer" containerID="0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9" Sep 29 13:42:12 crc kubenswrapper[4611]: E0929 13:42:12.688361 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9\": container with ID starting with 0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9 not found: ID does not exist" containerID="0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.688391 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9"} err="failed to get container status \"0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9\": rpc error: code = NotFound desc = could not find container \"0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9\": container with ID starting with 0041a4649f4f766f90850c02cefb49fe7af523ba52d5c9275f8a42b7f201aff9 not found: ID does not exist" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.688418 4611 scope.go:117] "RemoveContainer" containerID="fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb" Sep 29 13:42:12 crc kubenswrapper[4611]: E0929 13:42:12.689012 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb\": container with ID starting with fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb not found: ID does not exist" containerID="fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.689044 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb"} err="failed to get container status \"fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb\": rpc error: code = NotFound desc = could not find container \"fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb\": container with ID starting with fcb2d90fa91303d80f76c05aebf85e9efd46355cc87b7719e1f2eade93850efb not found: ID does not exist" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.689063 4611 scope.go:117] "RemoveContainer" containerID="5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb" Sep 29 13:42:12 crc kubenswrapper[4611]: E0929 13:42:12.689520 4611 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb\": container with ID starting with 5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb not found: ID does not exist" containerID="5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb" Sep 29 13:42:12 crc kubenswrapper[4611]: I0929 13:42:12.689564 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb"} err="failed to get container status \"5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb\": rpc error: code = NotFound desc = could not find container \"5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb\": container with ID starting with 5350fb1744c5afd215799f81d9c7762eb96b00a2286d1c914474f4a06ea29beb not found: ID does not exist" Sep 29 13:42:13 crc kubenswrapper[4611]: I0929 13:42:13.751101 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" path="/var/lib/kubelet/pods/04a1274c-3727-48c2-b5a6-15e179d71cb6/volumes" Sep 29 13:42:15 crc kubenswrapper[4611]: I0929 13:42:15.584539 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:15 crc kubenswrapper[4611]: I0929 13:42:15.585363 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:15 crc kubenswrapper[4611]: I0929 13:42:15.631885 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:16 crc kubenswrapper[4611]: I0929 13:42:16.676384 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:16 crc kubenswrapper[4611]: I0929 13:42:16.732879 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-slvgn"] Sep 29 13:42:18 crc kubenswrapper[4611]: I0929 13:42:18.647113 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-slvgn" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="registry-server" containerID="cri-o://1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f" gracePeriod=2 Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.162417 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.241450 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfdxp\" (UniqueName: \"kubernetes.io/projected/bc0c9851-8404-41a4-ae6f-893c5668e818-kube-api-access-zfdxp\") pod \"bc0c9851-8404-41a4-ae6f-893c5668e818\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.241591 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-catalog-content\") pod \"bc0c9851-8404-41a4-ae6f-893c5668e818\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.241671 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-utilities\") pod \"bc0c9851-8404-41a4-ae6f-893c5668e818\" (UID: \"bc0c9851-8404-41a4-ae6f-893c5668e818\") " Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.242870 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-utilities" (OuterVolumeSpecName: "utilities") pod "bc0c9851-8404-41a4-ae6f-893c5668e818" (UID: "bc0c9851-8404-41a4-ae6f-893c5668e818"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.258942 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc0c9851-8404-41a4-ae6f-893c5668e818-kube-api-access-zfdxp" (OuterVolumeSpecName: "kube-api-access-zfdxp") pod "bc0c9851-8404-41a4-ae6f-893c5668e818" (UID: "bc0c9851-8404-41a4-ae6f-893c5668e818"). InnerVolumeSpecName "kube-api-access-zfdxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.260492 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc0c9851-8404-41a4-ae6f-893c5668e818" (UID: "bc0c9851-8404-41a4-ae6f-893c5668e818"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.343507 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfdxp\" (UniqueName: \"kubernetes.io/projected/bc0c9851-8404-41a4-ae6f-893c5668e818-kube-api-access-zfdxp\") on node \"crc\" DevicePath \"\"" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.343547 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.343557 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc0c9851-8404-41a4-ae6f-893c5668e818-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.658584 4611 generic.go:334] "Generic (PLEG): container finished" podID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerID="1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f" exitCode=0 Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.659055 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-slvgn" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.659083 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerDied","Data":"1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f"} Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.659796 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-slvgn" event={"ID":"bc0c9851-8404-41a4-ae6f-893c5668e818","Type":"ContainerDied","Data":"fbffdd54793434578ea5d2661a6471f953ba8c995b16db1cdb8ba06997fd9f81"} Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.659828 4611 scope.go:117] "RemoveContainer" containerID="1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.706670 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-slvgn"] Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.711843 4611 scope.go:117] "RemoveContainer" containerID="50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.719869 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-slvgn"] Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.737559 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:42:19 crc kubenswrapper[4611]: E0929 13:42:19.737937 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.742094 4611 scope.go:117] "RemoveContainer" containerID="87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef" Sep 29 13:42:19 crc kubenswrapper[4611]: 
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.752034 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" path="/var/lib/kubelet/pods/bc0c9851-8404-41a4-ae6f-893c5668e818/volumes"
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.793043 4611 scope.go:117] "RemoveContainer" containerID="1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f"
Sep 29 13:42:19 crc kubenswrapper[4611]: E0929 13:42:19.793697 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f\": container with ID starting with 1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f not found: ID does not exist" containerID="1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f"
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.793743 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f"} err="failed to get container status \"1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f\": rpc error: code = NotFound desc = could not find container \"1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f\": container with ID starting with 1eaf051004a2d33e0d8a8c760cce610b80a80edb4a1bf4aa9f866d2ecd91112f not found: ID does not exist"
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.793764 4611 scope.go:117] "RemoveContainer" containerID="50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99"
Sep 29 13:42:19 crc kubenswrapper[4611]: E0929 13:42:19.794200 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99\": container with ID starting with 50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99 not found: ID does not exist" containerID="50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99"
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.794242 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99"} err="failed to get container status \"50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99\": rpc error: code = NotFound desc = could not find container \"50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99\": container with ID starting with 50f991249f25598ca7d2fa4813b1a990e6858936134fb9ba9c8ad0574279ea99 not found: ID does not exist"
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.794269 4611 scope.go:117] "RemoveContainer" containerID="87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef"
Sep 29 13:42:19 crc kubenswrapper[4611]: E0929 13:42:19.795016 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef\": container with ID starting with 87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef not found: ID does not exist" containerID="87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef"
Sep 29 13:42:19 crc kubenswrapper[4611]: I0929 13:42:19.795075 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef"} err="failed to get container status \"87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef\": rpc error: code = NotFound desc = could not find container \"87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef\": container with ID starting with 87db8682112d68addd7652dfdd3579a070d04046d8d98b02ce0a0f1fbccaf7ef not found: ID does not exist"
Sep 29 13:42:31 crc kubenswrapper[4611]: I0929 13:42:31.736149 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:42:31 crc kubenswrapper[4611]: E0929 13:42:31.736946 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:42:37 crc kubenswrapper[4611]: I0929 13:42:37.905977 4611 scope.go:117] "RemoveContainer" containerID="3aba9c22096497c5d73f348dfcce69511854a20dd59b16f1d9ba3e8bcb59a691"
Sep 29 13:42:44 crc kubenswrapper[4611]: I0929 13:42:44.737000 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:42:44 crc kubenswrapper[4611]: E0929 13:42:44.737832 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:42:51 crc kubenswrapper[4611]: I0929 13:42:51.822155 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="46d0113e-4eb9-4b51-981e-744b6dd0842e" containerName="galera" probeResult="failure" output="command timed out"
Sep 29 13:42:58 crc kubenswrapper[4611]: I0929 13:42:58.736302 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:42:58 crc kubenswrapper[4611]: E0929 13:42:58.737087 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:43:10 crc kubenswrapper[4611]: I0929 13:43:10.737214 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:43:10 crc kubenswrapper[4611]: E0929 13:43:10.738013 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:43:23 crc kubenswrapper[4611]: I0929 13:43:23.744908 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:43:23 crc kubenswrapper[4611]: E0929 13:43:23.745588 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:43:35 crc kubenswrapper[4611]: I0929 13:43:35.736758 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:43:35 crc kubenswrapper[4611]: E0929 13:43:35.737540 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:43:50 crc kubenswrapper[4611]: I0929 13:43:50.736355 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:43:50 crc kubenswrapper[4611]: E0929 13:43:50.737295 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:44:02 crc kubenswrapper[4611]: I0929 13:44:02.738060 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:44:02 crc kubenswrapper[4611]: E0929 13:44:02.739361 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:44:15 crc kubenswrapper[4611]: I0929 13:44:15.737139 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:44:15 crc kubenswrapper[4611]: E0929 13:44:15.738283 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:44:30 crc kubenswrapper[4611]: I0929 13:44:30.737050 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:44:30 crc kubenswrapper[4611]: E0929 13:44:30.738114 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:44:44 crc kubenswrapper[4611]: I0929 13:44:44.735744 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:44:44 crc kubenswrapper[4611]: E0929 13:44:44.736537 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:44:55 crc kubenswrapper[4611]: I0929 13:44:55.736950 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:44:55 crc kubenswrapper[4611]: E0929 13:44:55.738223 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.169823 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"]
Sep 29 13:45:00 crc kubenswrapper[4611]: E0929 13:45:00.172023 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="extract-utilities"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.172128 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="extract-utilities"
Sep 29 13:45:00 crc kubenswrapper[4611]: E0929 13:45:00.172222 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="extract-content"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.172302 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="extract-content"
Sep 29 13:45:00 crc kubenswrapper[4611]: E0929 13:45:00.172395 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="extract-utilities"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.172469 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="extract-utilities"
Sep 29 13:45:00 crc kubenswrapper[4611]: E0929 13:45:00.172562 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="registry-server"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.172656 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="registry-server"
Sep 29 13:45:00 crc kubenswrapper[4611]: E0929 13:45:00.174897 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="extract-content"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.175070 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="extract-content"
Sep 29 13:45:00 crc kubenswrapper[4611]: E0929 13:45:00.175170 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="registry-server"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.175268 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="registry-server"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.175658 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="04a1274c-3727-48c2-b5a6-15e179d71cb6" containerName="registry-server"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.175782 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc0c9851-8404-41a4-ae6f-893c5668e818" containerName="registry-server"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.176790 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.181337 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.182935 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.189878 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"]
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.282402 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq5cg\" (UniqueName: \"kubernetes.io/projected/517a165f-f5fb-4788-b61b-b2ad505703ab-kube-api-access-fq5cg\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.282488 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/517a165f-f5fb-4788-b61b-b2ad505703ab-secret-volume\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.282562 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/517a165f-f5fb-4788-b61b-b2ad505703ab-config-volume\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.384070 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/517a165f-f5fb-4788-b61b-b2ad505703ab-secret-volume\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.384176 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/517a165f-f5fb-4788-b61b-b2ad505703ab-config-volume\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.384305 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq5cg\" (UniqueName: \"kubernetes.io/projected/517a165f-f5fb-4788-b61b-b2ad505703ab-kube-api-access-fq5cg\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.385974 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/517a165f-f5fb-4788-b61b-b2ad505703ab-config-volume\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.392569 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/517a165f-f5fb-4788-b61b-b2ad505703ab-secret-volume\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.416305 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq5cg\" (UniqueName: \"kubernetes.io/projected/517a165f-f5fb-4788-b61b-b2ad505703ab-kube-api-access-fq5cg\") pod \"collect-profiles-29319225-hs9v2\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:00 crc kubenswrapper[4611]: I0929 13:45:00.507371 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:01 crc kubenswrapper[4611]: I0929 13:45:01.116730 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"]
Sep 29 13:45:01 crc kubenswrapper[4611]: I0929 13:45:01.282298 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2" event={"ID":"517a165f-f5fb-4788-b61b-b2ad505703ab","Type":"ContainerStarted","Data":"2fa45b9c54881dd2288a57498a227fbddf5dd54ba5e38e1f4895e0ca51908044"}
Sep 29 13:45:01 crc kubenswrapper[4611]: I0929 13:45:01.282394 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2" event={"ID":"517a165f-f5fb-4788-b61b-b2ad505703ab","Type":"ContainerStarted","Data":"00d284125662bc39792a7bf1e0b88cf61f00be6d147e0647c974ec2d3c04336a"}
Sep 29 13:45:01 crc kubenswrapper[4611]: I0929 13:45:01.301985 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2" podStartSLOduration=1.301968303 podStartE2EDuration="1.301968303s" podCreationTimestamp="2025-09-29 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:45:01.301390696 +0000 UTC m=+3888.192910322" watchObservedRunningTime="2025-09-29 13:45:01.301968303 +0000 UTC m=+3888.193487909"
Sep 29 13:45:02 crc kubenswrapper[4611]: I0929 13:45:02.296687 4611 generic.go:334] "Generic (PLEG): container finished" podID="517a165f-f5fb-4788-b61b-b2ad505703ab" containerID="2fa45b9c54881dd2288a57498a227fbddf5dd54ba5e38e1f4895e0ca51908044" exitCode=0
Sep 29 13:45:02 crc kubenswrapper[4611]: I0929 13:45:02.296868 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2" event={"ID":"517a165f-f5fb-4788-b61b-b2ad505703ab","Type":"ContainerDied","Data":"2fa45b9c54881dd2288a57498a227fbddf5dd54ba5e38e1f4895e0ca51908044"}
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.658524 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.745820 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq5cg\" (UniqueName: \"kubernetes.io/projected/517a165f-f5fb-4788-b61b-b2ad505703ab-kube-api-access-fq5cg\") pod \"517a165f-f5fb-4788-b61b-b2ad505703ab\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") "
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.745982 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/517a165f-f5fb-4788-b61b-b2ad505703ab-config-volume\") pod \"517a165f-f5fb-4788-b61b-b2ad505703ab\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") "
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.746122 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/517a165f-f5fb-4788-b61b-b2ad505703ab-secret-volume\") pod \"517a165f-f5fb-4788-b61b-b2ad505703ab\" (UID: \"517a165f-f5fb-4788-b61b-b2ad505703ab\") "
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.746568 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/517a165f-f5fb-4788-b61b-b2ad505703ab-config-volume" (OuterVolumeSpecName: "config-volume") pod "517a165f-f5fb-4788-b61b-b2ad505703ab" (UID: "517a165f-f5fb-4788-b61b-b2ad505703ab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.751449 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/517a165f-f5fb-4788-b61b-b2ad505703ab-kube-api-access-fq5cg" (OuterVolumeSpecName: "kube-api-access-fq5cg") pod "517a165f-f5fb-4788-b61b-b2ad505703ab" (UID: "517a165f-f5fb-4788-b61b-b2ad505703ab"). InnerVolumeSpecName "kube-api-access-fq5cg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.751759 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/517a165f-f5fb-4788-b61b-b2ad505703ab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "517a165f-f5fb-4788-b61b-b2ad505703ab" (UID: "517a165f-f5fb-4788-b61b-b2ad505703ab"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.851259 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/517a165f-f5fb-4788-b61b-b2ad505703ab-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.851303 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq5cg\" (UniqueName: \"kubernetes.io/projected/517a165f-f5fb-4788-b61b-b2ad505703ab-kube-api-access-fq5cg\") on node \"crc\" DevicePath \"\""
Sep 29 13:45:03 crc kubenswrapper[4611]: I0929 13:45:03.851313 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/517a165f-f5fb-4788-b61b-b2ad505703ab-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 13:45:04 crc kubenswrapper[4611]: I0929 13:45:04.316707 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2" event={"ID":"517a165f-f5fb-4788-b61b-b2ad505703ab","Type":"ContainerDied","Data":"00d284125662bc39792a7bf1e0b88cf61f00be6d147e0647c974ec2d3c04336a"}
Sep 29 13:45:04 crc kubenswrapper[4611]: I0929 13:45:04.316751 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00d284125662bc39792a7bf1e0b88cf61f00be6d147e0647c974ec2d3c04336a"
Sep 29 13:45:04 crc kubenswrapper[4611]: I0929 13:45:04.316767 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"
Sep 29 13:45:04 crc kubenswrapper[4611]: I0929 13:45:04.384053 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"]
Sep 29 13:45:04 crc kubenswrapper[4611]: I0929 13:45:04.392664 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319180-v6tzk"]
Sep 29 13:45:05 crc kubenswrapper[4611]: I0929 13:45:05.748473 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2335d26b-bfa6-4d00-b9a1-a6ed61250684" path="/var/lib/kubelet/pods/2335d26b-bfa6-4d00-b9a1-a6ed61250684/volumes"
Sep 29 13:45:07 crc kubenswrapper[4611]: I0929 13:45:07.737186 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:45:07 crc kubenswrapper[4611]: E0929 13:45:07.737762 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:45:19 crc kubenswrapper[4611]: I0929 13:45:19.736697 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24"
Sep 29 13:45:19 crc kubenswrapper[4611]: E0929 13:45:19.737515 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:45:31 crc kubenswrapper[4611]: I0929 13:45:31.737139 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:45:31 crc kubenswrapper[4611]: E0929 13:45:31.738010 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.063185 4611 scope.go:117] "RemoveContainer" containerID="70958e3a5a488608a2d03152b5a933319323321e0dfb317c1f329e7d77342323" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.475334 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-57qzm"] Sep 29 13:45:38 crc kubenswrapper[4611]: E0929 13:45:38.476250 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="517a165f-f5fb-4788-b61b-b2ad505703ab" containerName="collect-profiles" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.476281 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="517a165f-f5fb-4788-b61b-b2ad505703ab" containerName="collect-profiles" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.476610 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="517a165f-f5fb-4788-b61b-b2ad505703ab" containerName="collect-profiles" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.478890 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.490592 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-57qzm"] Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.653911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-utilities\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.653952 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wtj4\" (UniqueName: \"kubernetes.io/projected/fe948188-9910-4dc3-b0ed-d3966794efa0-kube-api-access-5wtj4\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.654114 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-catalog-content\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.756556 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-utilities\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.756638 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wtj4\" (UniqueName: \"kubernetes.io/projected/fe948188-9910-4dc3-b0ed-d3966794efa0-kube-api-access-5wtj4\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.756722 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-catalog-content\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.757345 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-utilities\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.757376 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-catalog-content\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.778025 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5wtj4\" (UniqueName: \"kubernetes.io/projected/fe948188-9910-4dc3-b0ed-d3966794efa0-kube-api-access-5wtj4\") pod \"community-operators-57qzm\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:38 crc kubenswrapper[4611]: I0929 13:45:38.795736 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:39 crc kubenswrapper[4611]: I0929 13:45:39.419740 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-57qzm"] Sep 29 13:45:39 crc kubenswrapper[4611]: I0929 13:45:39.640407 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerStarted","Data":"998db1692dc1a5353de3cd6a34478a3af059779f68c52b681f2311ed2b59c538"} Sep 29 13:45:40 crc kubenswrapper[4611]: I0929 13:45:40.651486 4611 generic.go:334] "Generic (PLEG): container finished" podID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerID="8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98" exitCode=0 Sep 29 13:45:40 crc kubenswrapper[4611]: I0929 13:45:40.651685 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerDied","Data":"8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98"} Sep 29 13:45:41 crc kubenswrapper[4611]: I0929 13:45:41.663386 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerStarted","Data":"97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885"} Sep 29 13:45:42 crc kubenswrapper[4611]: I0929 13:45:42.737621 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:45:42 crc kubenswrapper[4611]: E0929 13:45:42.738049 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:45:43 crc kubenswrapper[4611]: I0929 13:45:43.682124 4611 generic.go:334] "Generic (PLEG): container finished" podID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerID="97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885" exitCode=0 Sep 29 13:45:43 crc kubenswrapper[4611]: I0929 13:45:43.682183 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerDied","Data":"97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885"} Sep 29 13:45:44 crc kubenswrapper[4611]: I0929 13:45:44.704664 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerStarted","Data":"4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897"} Sep 29 13:45:44 crc kubenswrapper[4611]: I0929 13:45:44.723153 4611 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-57qzm" podStartSLOduration=3.303192293 podStartE2EDuration="6.723132807s" podCreationTimestamp="2025-09-29 13:45:38 +0000 UTC" firstStartedPulling="2025-09-29 13:45:40.654017081 +0000 UTC m=+3927.545536687" lastFinishedPulling="2025-09-29 13:45:44.073957595 +0000 UTC m=+3930.965477201" observedRunningTime="2025-09-29 13:45:44.720901872 +0000 UTC m=+3931.612421488" watchObservedRunningTime="2025-09-29 13:45:44.723132807 +0000 UTC m=+3931.614652413" Sep 29 13:45:48 crc kubenswrapper[4611]: I0929 13:45:48.795925 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:48 crc kubenswrapper[4611]: I0929 13:45:48.796538 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:48 crc kubenswrapper[4611]: I0929 13:45:48.863140 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:49 crc kubenswrapper[4611]: I0929 13:45:49.803681 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:52 crc kubenswrapper[4611]: I0929 13:45:52.865237 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-57qzm"] Sep 29 13:45:52 crc kubenswrapper[4611]: I0929 13:45:52.866197 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-57qzm" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="registry-server" containerID="cri-o://4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897" gracePeriod=2 Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.370521 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.445086 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wtj4\" (UniqueName: \"kubernetes.io/projected/fe948188-9910-4dc3-b0ed-d3966794efa0-kube-api-access-5wtj4\") pod \"fe948188-9910-4dc3-b0ed-d3966794efa0\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.445174 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-utilities\") pod \"fe948188-9910-4dc3-b0ed-d3966794efa0\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.445386 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-catalog-content\") pod \"fe948188-9910-4dc3-b0ed-d3966794efa0\" (UID: \"fe948188-9910-4dc3-b0ed-d3966794efa0\") " Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.446359 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-utilities" (OuterVolumeSpecName: "utilities") pod "fe948188-9910-4dc3-b0ed-d3966794efa0" (UID: "fe948188-9910-4dc3-b0ed-d3966794efa0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.455487 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe948188-9910-4dc3-b0ed-d3966794efa0-kube-api-access-5wtj4" (OuterVolumeSpecName: "kube-api-access-5wtj4") pod "fe948188-9910-4dc3-b0ed-d3966794efa0" (UID: "fe948188-9910-4dc3-b0ed-d3966794efa0"). InnerVolumeSpecName "kube-api-access-5wtj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.496336 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe948188-9910-4dc3-b0ed-d3966794efa0" (UID: "fe948188-9910-4dc3-b0ed-d3966794efa0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.547909 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wtj4\" (UniqueName: \"kubernetes.io/projected/fe948188-9910-4dc3-b0ed-d3966794efa0-kube-api-access-5wtj4\") on node \"crc\" DevicePath \"\"" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.547944 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.547954 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe948188-9910-4dc3-b0ed-d3966794efa0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.790213 4611 generic.go:334] "Generic (PLEG): container finished" podID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerID="4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897" exitCode=0 Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.790264 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-57qzm" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.790275 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerDied","Data":"4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897"} Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.791511 4611 scope.go:117] "RemoveContainer" containerID="4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.792041 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57qzm" event={"ID":"fe948188-9910-4dc3-b0ed-d3966794efa0","Type":"ContainerDied","Data":"998db1692dc1a5353de3cd6a34478a3af059779f68c52b681f2311ed2b59c538"} Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.822827 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-57qzm"] Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.827511 4611 scope.go:117] "RemoveContainer" containerID="97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.834319 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-57qzm"] Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.853745 4611 scope.go:117] "RemoveContainer" containerID="8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.901080 4611 scope.go:117] "RemoveContainer" containerID="4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897" Sep 29 13:45:53 crc kubenswrapper[4611]: E0929 13:45:53.901433 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897\": container with ID starting with 4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897 not found: ID does not exist" containerID="4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.901465 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897"} err="failed to get container status \"4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897\": rpc error: code = NotFound desc = could not find container \"4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897\": container with ID starting with 4a72c3a79c34715a4949486dd501ed2effa02b15c1e63bb9dd8682be0aa33897 not found: ID does not exist" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.901489 4611 scope.go:117] "RemoveContainer" containerID="97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885" Sep 29 13:45:53 crc kubenswrapper[4611]: E0929 13:45:53.902976 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885\": container with ID starting with 97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885 not found: ID does not exist" containerID="97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.903004 4611 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885"} err="failed to get container status \"97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885\": rpc error: code = NotFound desc = could not find container \"97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885\": container with ID starting with 97023e33826700afaf60c1226a7e1282e82fa7509e98f2745adbeabe3e976885 not found: ID does not exist" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.903023 4611 scope.go:117] "RemoveContainer" containerID="8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98" Sep 29 13:45:53 crc kubenswrapper[4611]: E0929 13:45:53.903314 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98\": container with ID starting with 8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98 not found: ID does not exist" containerID="8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98" Sep 29 13:45:53 crc kubenswrapper[4611]: I0929 13:45:53.903374 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98"} err="failed to get container status \"8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98\": rpc error: code = NotFound desc = could not find container \"8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98\": container with ID starting with 8f5842d1dc9019d7a1e14c71dd32568d3c38d372ef4ffc3c63a7be3c94f8eb98 not found: ID does not exist" Sep 29 13:45:55 crc kubenswrapper[4611]: I0929 13:45:55.737696 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:45:55 crc kubenswrapper[4611]: E0929 13:45:55.737988 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:45:55 crc kubenswrapper[4611]: I0929 13:45:55.747157 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" path="/var/lib/kubelet/pods/fe948188-9910-4dc3-b0ed-d3966794efa0/volumes" Sep 29 13:46:09 crc kubenswrapper[4611]: I0929 13:46:09.737318 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:46:09 crc kubenswrapper[4611]: E0929 13:46:09.738277 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:46:20 crc kubenswrapper[4611]: I0929 13:46:20.737018 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:46:20 crc 
kubenswrapper[4611]: E0929 13:46:20.737911 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:46:34 crc kubenswrapper[4611]: I0929 13:46:34.735887 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:46:34 crc kubenswrapper[4611]: E0929 13:46:34.736473 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:46:46 crc kubenswrapper[4611]: I0929 13:46:46.736840 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:46:46 crc kubenswrapper[4611]: E0929 13:46:46.737815 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:46:59 crc kubenswrapper[4611]: I0929 13:46:59.736963 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:46:59 crc kubenswrapper[4611]: E0929 13:46:59.737845 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 13:47:14 crc kubenswrapper[4611]: I0929 13:47:14.739031 4611 scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:47:15 crc kubenswrapper[4611]: I0929 13:47:15.669863 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"a2703c62d50efe3d3252fa87ac5888b52cd4ef144cd5d770824b4fa01fe90d02"} Sep 29 13:49:34 crc kubenswrapper[4611]: I0929 13:49:34.629298 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:49:34 crc kubenswrapper[4611]: I0929 13:49:34.630787 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" 
podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:50:04 crc kubenswrapper[4611]: I0929 13:50:04.628167 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:50:04 crc kubenswrapper[4611]: I0929 13:50:04.628583 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.532509 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wfqrq"] Sep 29 13:50:16 crc kubenswrapper[4611]: E0929 13:50:16.533339 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="extract-content" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.533353 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="extract-content" Sep 29 13:50:16 crc kubenswrapper[4611]: E0929 13:50:16.533363 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="extract-utilities" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.533369 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="extract-utilities" Sep 29 13:50:16 crc kubenswrapper[4611]: E0929 13:50:16.533384 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="registry-server" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.533390 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="registry-server" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.533611 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe948188-9910-4dc3-b0ed-d3966794efa0" containerName="registry-server" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.535062 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.566492 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wfqrq"] Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.666670 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b498l\" (UniqueName: \"kubernetes.io/projected/8ba93585-5280-41d1-bca7-539afbd9c02f-kube-api-access-b498l\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.666720 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-utilities\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.666739 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-catalog-content\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.768812 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b498l\" (UniqueName: \"kubernetes.io/projected/8ba93585-5280-41d1-bca7-539afbd9c02f-kube-api-access-b498l\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.768859 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-utilities\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.768879 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-catalog-content\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.769418 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-catalog-content\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.769590 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-utilities\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.792111 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-b498l\" (UniqueName: \"kubernetes.io/projected/8ba93585-5280-41d1-bca7-539afbd9c02f-kube-api-access-b498l\") pod \"redhat-operators-wfqrq\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:16 crc kubenswrapper[4611]: I0929 13:50:16.860211 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:17 crc kubenswrapper[4611]: I0929 13:50:17.404966 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wfqrq"] Sep 29 13:50:17 crc kubenswrapper[4611]: I0929 13:50:17.516435 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerStarted","Data":"e6e8520cd237e7965fe8b0249c4474189ae9111fe8ed593e2f041053f156afff"} Sep 29 13:50:18 crc kubenswrapper[4611]: I0929 13:50:18.527669 4611 generic.go:334] "Generic (PLEG): container finished" podID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerID="80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5" exitCode=0 Sep 29 13:50:18 crc kubenswrapper[4611]: I0929 13:50:18.527746 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerDied","Data":"80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5"} Sep 29 13:50:18 crc kubenswrapper[4611]: I0929 13:50:18.531118 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:50:20 crc kubenswrapper[4611]: I0929 13:50:20.566857 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerStarted","Data":"679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d"} Sep 29 13:50:24 crc kubenswrapper[4611]: I0929 13:50:24.611293 4611 generic.go:334] "Generic (PLEG): container finished" podID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerID="679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d" exitCode=0 Sep 29 13:50:24 crc kubenswrapper[4611]: I0929 13:50:24.611369 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerDied","Data":"679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d"} Sep 29 13:50:25 crc kubenswrapper[4611]: I0929 13:50:25.625442 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerStarted","Data":"bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42"} Sep 29 13:50:25 crc kubenswrapper[4611]: I0929 13:50:25.648742 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wfqrq" podStartSLOduration=2.789936522 podStartE2EDuration="9.648725263s" podCreationTimestamp="2025-09-29 13:50:16 +0000 UTC" firstStartedPulling="2025-09-29 13:50:18.530724476 +0000 UTC m=+4205.422244092" lastFinishedPulling="2025-09-29 13:50:25.389513197 +0000 UTC m=+4212.281032833" observedRunningTime="2025-09-29 13:50:25.642092841 +0000 UTC m=+4212.533612447" watchObservedRunningTime="2025-09-29 13:50:25.648725263 +0000 UTC m=+4212.540244869" Sep 29 13:50:26 crc 
kubenswrapper[4611]: I0929 13:50:26.861175 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:26 crc kubenswrapper[4611]: I0929 13:50:26.861696 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:27 crc kubenswrapper[4611]: I0929 13:50:27.924095 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wfqrq" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="registry-server" probeResult="failure" output=< Sep 29 13:50:27 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:50:27 crc kubenswrapper[4611]: > Sep 29 13:50:34 crc kubenswrapper[4611]: I0929 13:50:34.629027 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:50:34 crc kubenswrapper[4611]: I0929 13:50:34.629532 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:50:34 crc kubenswrapper[4611]: I0929 13:50:34.629583 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 13:50:34 crc kubenswrapper[4611]: I0929 13:50:34.630204 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a2703c62d50efe3d3252fa87ac5888b52cd4ef144cd5d770824b4fa01fe90d02"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:50:34 crc kubenswrapper[4611]: I0929 13:50:34.630262 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://a2703c62d50efe3d3252fa87ac5888b52cd4ef144cd5d770824b4fa01fe90d02" gracePeriod=600 Sep 29 13:50:35 crc kubenswrapper[4611]: I0929 13:50:35.714747 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="a2703c62d50efe3d3252fa87ac5888b52cd4ef144cd5d770824b4fa01fe90d02" exitCode=0 Sep 29 13:50:35 crc kubenswrapper[4611]: I0929 13:50:35.714833 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"a2703c62d50efe3d3252fa87ac5888b52cd4ef144cd5d770824b4fa01fe90d02"} Sep 29 13:50:35 crc kubenswrapper[4611]: I0929 13:50:35.715374 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"} Sep 29 13:50:35 crc kubenswrapper[4611]: I0929 13:50:35.715409 4611 
scope.go:117] "RemoveContainer" containerID="7421d1a4c74c3d7dbe44ea843fd7053a7f973df67eea30d49cb515e58dc42d24" Sep 29 13:50:37 crc kubenswrapper[4611]: I0929 13:50:37.917444 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wfqrq" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="registry-server" probeResult="failure" output=< Sep 29 13:50:37 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 13:50:37 crc kubenswrapper[4611]: > Sep 29 13:50:46 crc kubenswrapper[4611]: I0929 13:50:46.919022 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:46 crc kubenswrapper[4611]: I0929 13:50:46.993205 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:47 crc kubenswrapper[4611]: I0929 13:50:47.731803 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wfqrq"] Sep 29 13:50:48 crc kubenswrapper[4611]: I0929 13:50:48.846986 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wfqrq" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="registry-server" containerID="cri-o://bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42" gracePeriod=2 Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.441762 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.561961 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-catalog-content\") pod \"8ba93585-5280-41d1-bca7-539afbd9c02f\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.562116 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-utilities\") pod \"8ba93585-5280-41d1-bca7-539afbd9c02f\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.562160 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b498l\" (UniqueName: \"kubernetes.io/projected/8ba93585-5280-41d1-bca7-539afbd9c02f-kube-api-access-b498l\") pod \"8ba93585-5280-41d1-bca7-539afbd9c02f\" (UID: \"8ba93585-5280-41d1-bca7-539afbd9c02f\") " Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.562782 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-utilities" (OuterVolumeSpecName: "utilities") pod "8ba93585-5280-41d1-bca7-539afbd9c02f" (UID: "8ba93585-5280-41d1-bca7-539afbd9c02f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.582418 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ba93585-5280-41d1-bca7-539afbd9c02f-kube-api-access-b498l" (OuterVolumeSpecName: "kube-api-access-b498l") pod "8ba93585-5280-41d1-bca7-539afbd9c02f" (UID: "8ba93585-5280-41d1-bca7-539afbd9c02f"). InnerVolumeSpecName "kube-api-access-b498l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.647879 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ba93585-5280-41d1-bca7-539afbd9c02f" (UID: "8ba93585-5280-41d1-bca7-539afbd9c02f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.663747 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.663783 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ba93585-5280-41d1-bca7-539afbd9c02f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.663797 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b498l\" (UniqueName: \"kubernetes.io/projected/8ba93585-5280-41d1-bca7-539afbd9c02f-kube-api-access-b498l\") on node \"crc\" DevicePath \"\"" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.860653 4611 generic.go:334] "Generic (PLEG): container finished" podID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerID="bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42" exitCode=0 Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.860708 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerDied","Data":"bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42"} Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.860737 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wfqrq" event={"ID":"8ba93585-5280-41d1-bca7-539afbd9c02f","Type":"ContainerDied","Data":"e6e8520cd237e7965fe8b0249c4474189ae9111fe8ed593e2f041053f156afff"} Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.860749 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wfqrq" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.860785 4611 scope.go:117] "RemoveContainer" containerID="bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.892673 4611 scope.go:117] "RemoveContainer" containerID="679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.896032 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wfqrq"] Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.906805 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wfqrq"] Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.917361 4611 scope.go:117] "RemoveContainer" containerID="80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.963824 4611 scope.go:117] "RemoveContainer" containerID="bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42" Sep 29 13:50:49 crc kubenswrapper[4611]: E0929 13:50:49.965890 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42\": container with ID starting with bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42 not found: ID does not exist" containerID="bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.965934 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42"} err="failed to get container status \"bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42\": rpc error: code = NotFound desc = could not find container \"bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42\": container with ID starting with bff53742822c18b92b7892310fc7c13b39654affc1ca4b71c9d159332df74c42 not found: ID does not exist" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.965958 4611 scope.go:117] "RemoveContainer" containerID="679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d" Sep 29 13:50:49 crc kubenswrapper[4611]: E0929 13:50:49.966254 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d\": container with ID starting with 679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d not found: ID does not exist" containerID="679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.966281 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d"} err="failed to get container status \"679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d\": rpc error: code = NotFound desc = could not find container \"679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d\": container with ID starting with 679de6105aa672096944e687ab3f760352377654e6cb4cec4bea10e1f70d579d not found: ID does not exist" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.966299 4611 scope.go:117] "RemoveContainer" 
containerID="80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5" Sep 29 13:50:49 crc kubenswrapper[4611]: E0929 13:50:49.966503 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5\": container with ID starting with 80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5 not found: ID does not exist" containerID="80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5" Sep 29 13:50:49 crc kubenswrapper[4611]: I0929 13:50:49.966526 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5"} err="failed to get container status \"80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5\": rpc error: code = NotFound desc = could not find container \"80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5\": container with ID starting with 80d1e6a9421ccc46bbfae26f73fc263d3f54474a89a94c1ae86b827fd94792c5 not found: ID does not exist" Sep 29 13:50:51 crc kubenswrapper[4611]: I0929 13:50:51.748258 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" path="/var/lib/kubelet/pods/8ba93585-5280-41d1-bca7-539afbd9c02f/volumes" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.209007 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-657m6"] Sep 29 13:52:01 crc kubenswrapper[4611]: E0929 13:52:01.210149 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="extract-content" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.210169 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="extract-content" Sep 29 13:52:01 crc kubenswrapper[4611]: E0929 13:52:01.210187 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="extract-utilities" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.210198 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="extract-utilities" Sep 29 13:52:01 crc kubenswrapper[4611]: E0929 13:52:01.210247 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="registry-server" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.210264 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="registry-server" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.210565 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba93585-5280-41d1-bca7-539afbd9c02f" containerName="registry-server" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.212776 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.234463 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-657m6"] Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.259239 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-utilities\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.259312 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-catalog-content\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.259419 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cdpv\" (UniqueName: \"kubernetes.io/projected/5a536603-9baa-4ed0-973b-a58fb36a69e9-kube-api-access-7cdpv\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.360712 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-utilities\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.360806 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-catalog-content\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.361039 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-utilities\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.361254 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-catalog-content\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.361376 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cdpv\" (UniqueName: \"kubernetes.io/projected/5a536603-9baa-4ed0-973b-a58fb36a69e9-kube-api-access-7cdpv\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.380537 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7cdpv\" (UniqueName: \"kubernetes.io/projected/5a536603-9baa-4ed0-973b-a58fb36a69e9-kube-api-access-7cdpv\") pod \"certified-operators-657m6\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:01 crc kubenswrapper[4611]: I0929 13:52:01.542782 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:02 crc kubenswrapper[4611]: I0929 13:52:02.166415 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-657m6"] Sep 29 13:52:02 crc kubenswrapper[4611]: I0929 13:52:02.676592 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerStarted","Data":"32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741"} Sep 29 13:52:02 crc kubenswrapper[4611]: I0929 13:52:02.677028 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerStarted","Data":"91515c472d92ff4698d2bed9f0b3a1b157fcd44a19bcbdaf4075f1f66e241026"} Sep 29 13:52:03 crc kubenswrapper[4611]: I0929 13:52:03.703199 4611 generic.go:334] "Generic (PLEG): container finished" podID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerID="32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741" exitCode=0 Sep 29 13:52:03 crc kubenswrapper[4611]: I0929 13:52:03.703383 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerDied","Data":"32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741"} Sep 29 13:52:04 crc kubenswrapper[4611]: I0929 13:52:04.715553 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerStarted","Data":"20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736"} Sep 29 13:52:06 crc kubenswrapper[4611]: I0929 13:52:06.733455 4611 generic.go:334] "Generic (PLEG): container finished" podID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerID="20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736" exitCode=0 Sep 29 13:52:06 crc kubenswrapper[4611]: I0929 13:52:06.733544 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerDied","Data":"20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736"} Sep 29 13:52:07 crc kubenswrapper[4611]: I0929 13:52:07.754427 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerStarted","Data":"029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59"} Sep 29 13:52:07 crc kubenswrapper[4611]: I0929 13:52:07.783174 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-657m6" podStartSLOduration=3.357175105 podStartE2EDuration="6.783152862s" podCreationTimestamp="2025-09-29 13:52:01 +0000 UTC" firstStartedPulling="2025-09-29 13:52:03.7055597 +0000 UTC m=+4310.597079306" lastFinishedPulling="2025-09-29 
13:52:07.131537457 +0000 UTC m=+4314.023057063" observedRunningTime="2025-09-29 13:52:07.77687039 +0000 UTC m=+4314.668390006" watchObservedRunningTime="2025-09-29 13:52:07.783152862 +0000 UTC m=+4314.674672488" Sep 29 13:52:11 crc kubenswrapper[4611]: I0929 13:52:11.543833 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:11 crc kubenswrapper[4611]: I0929 13:52:11.544271 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:11 crc kubenswrapper[4611]: I0929 13:52:11.600364 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:21 crc kubenswrapper[4611]: I0929 13:52:21.778490 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:21 crc kubenswrapper[4611]: I0929 13:52:21.842453 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-657m6"] Sep 29 13:52:21 crc kubenswrapper[4611]: I0929 13:52:21.902674 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-657m6" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="registry-server" containerID="cri-o://029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59" gracePeriod=2 Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.444116 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.557236 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cdpv\" (UniqueName: \"kubernetes.io/projected/5a536603-9baa-4ed0-973b-a58fb36a69e9-kube-api-access-7cdpv\") pod \"5a536603-9baa-4ed0-973b-a58fb36a69e9\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.557399 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-utilities\") pod \"5a536603-9baa-4ed0-973b-a58fb36a69e9\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.557511 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-catalog-content\") pod \"5a536603-9baa-4ed0-973b-a58fb36a69e9\" (UID: \"5a536603-9baa-4ed0-973b-a58fb36a69e9\") " Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.559997 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-utilities" (OuterVolumeSpecName: "utilities") pod "5a536603-9baa-4ed0-973b-a58fb36a69e9" (UID: "5a536603-9baa-4ed0-973b-a58fb36a69e9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.566199 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a536603-9baa-4ed0-973b-a58fb36a69e9-kube-api-access-7cdpv" (OuterVolumeSpecName: "kube-api-access-7cdpv") pod "5a536603-9baa-4ed0-973b-a58fb36a69e9" (UID: "5a536603-9baa-4ed0-973b-a58fb36a69e9"). InnerVolumeSpecName "kube-api-access-7cdpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.600030 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a536603-9baa-4ed0-973b-a58fb36a69e9" (UID: "5a536603-9baa-4ed0-973b-a58fb36a69e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.660158 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.660189 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a536603-9baa-4ed0-973b-a58fb36a69e9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.660201 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cdpv\" (UniqueName: \"kubernetes.io/projected/5a536603-9baa-4ed0-973b-a58fb36a69e9-kube-api-access-7cdpv\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.913927 4611 generic.go:334] "Generic (PLEG): container finished" podID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerID="029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59" exitCode=0 Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.913985 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerDied","Data":"029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59"} Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.914010 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-657m6" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.914032 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-657m6" event={"ID":"5a536603-9baa-4ed0-973b-a58fb36a69e9","Type":"ContainerDied","Data":"91515c472d92ff4698d2bed9f0b3a1b157fcd44a19bcbdaf4075f1f66e241026"} Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.914061 4611 scope.go:117] "RemoveContainer" containerID="029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.937508 4611 scope.go:117] "RemoveContainer" containerID="20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.961197 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-657m6"] Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.973996 4611 scope.go:117] "RemoveContainer" containerID="32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741" Sep 29 13:52:22 crc kubenswrapper[4611]: I0929 13:52:22.977380 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-657m6"] Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.029772 4611 scope.go:117] "RemoveContainer" containerID="029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59" Sep 29 13:52:23 crc kubenswrapper[4611]: E0929 13:52:23.032115 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59\": container with ID starting with 029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59 not found: ID does not exist" containerID="029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59" Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.032184 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59"} err="failed to get container status \"029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59\": rpc error: code = NotFound desc = could not find container \"029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59\": container with ID starting with 029d1115cb517c58557a302a87cdcde52b0cc0469bf1144149f986f3c6974d59 not found: ID does not exist" Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.032218 4611 scope.go:117] "RemoveContainer" containerID="20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736" Sep 29 13:52:23 crc kubenswrapper[4611]: E0929 13:52:23.032779 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736\": container with ID starting with 20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736 not found: ID does not exist" containerID="20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736" Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.032824 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736"} err="failed to get container status \"20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736\": rpc error: code = NotFound desc = could not find 
container \"20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736\": container with ID starting with 20f101694d170e138571325d6038960c635c27a05188c4acbe2021ccfa59d736 not found: ID does not exist" Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.032853 4611 scope.go:117] "RemoveContainer" containerID="32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741" Sep 29 13:52:23 crc kubenswrapper[4611]: E0929 13:52:23.033163 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741\": container with ID starting with 32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741 not found: ID does not exist" containerID="32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741" Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.033194 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741"} err="failed to get container status \"32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741\": rpc error: code = NotFound desc = could not find container \"32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741\": container with ID starting with 32e40e9f03187178aa86a6c5f0b53ed90ca880bf5557ec42d103cebd2a16d741 not found: ID does not exist" Sep 29 13:52:23 crc kubenswrapper[4611]: I0929 13:52:23.745810 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" path="/var/lib/kubelet/pods/5a536603-9baa-4ed0-973b-a58fb36a69e9/volumes" Sep 29 13:52:49 crc kubenswrapper[4611]: I0929 13:52:49.992509 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wgrcl"] Sep 29 13:52:49 crc kubenswrapper[4611]: E0929 13:52:49.993350 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="extract-content" Sep 29 13:52:49 crc kubenswrapper[4611]: I0929 13:52:49.993362 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="extract-content" Sep 29 13:52:49 crc kubenswrapper[4611]: E0929 13:52:49.993381 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="registry-server" Sep 29 13:52:49 crc kubenswrapper[4611]: I0929 13:52:49.993388 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="registry-server" Sep 29 13:52:49 crc kubenswrapper[4611]: E0929 13:52:49.993410 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="extract-utilities" Sep 29 13:52:49 crc kubenswrapper[4611]: I0929 13:52:49.993427 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="extract-utilities" Sep 29 13:52:49 crc kubenswrapper[4611]: I0929 13:52:49.993617 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a536603-9baa-4ed0-973b-a58fb36a69e9" containerName="registry-server" Sep 29 13:52:49 crc kubenswrapper[4611]: I0929 13:52:49.994870 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.006173 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgrcl"]
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.085126 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-catalog-content\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.085246 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-utilities\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.085282 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m65nn\" (UniqueName: \"kubernetes.io/projected/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-kube-api-access-m65nn\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.189039 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-catalog-content\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.189140 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-utilities\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.189295 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m65nn\" (UniqueName: \"kubernetes.io/projected/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-kube-api-access-m65nn\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.189884 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-utilities\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.190139 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-catalog-content\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.213752 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m65nn\" (UniqueName: \"kubernetes.io/projected/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-kube-api-access-m65nn\") pod \"redhat-marketplace-wgrcl\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") " pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:50 crc kubenswrapper[4611]: I0929 13:52:50.317517 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:52:51 crc kubenswrapper[4611]: I0929 13:52:51.216319 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgrcl"]
Sep 29 13:52:52 crc kubenswrapper[4611]: I0929 13:52:52.227764 4611 generic.go:334] "Generic (PLEG): container finished" podID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerID="025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb" exitCode=0
Sep 29 13:52:52 crc kubenswrapper[4611]: I0929 13:52:52.228168 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgrcl" event={"ID":"eae9ac17-9e00-423e-9f93-1a6fe0e61c58","Type":"ContainerDied","Data":"025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb"}
Sep 29 13:52:52 crc kubenswrapper[4611]: I0929 13:52:52.228201 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgrcl" event={"ID":"eae9ac17-9e00-423e-9f93-1a6fe0e61c58","Type":"ContainerStarted","Data":"8e3e4f6d5eaab646a8cb1551f0d826c59295cffdd37c21f84599a8c766cd87c4"}
Sep 29 13:52:54 crc kubenswrapper[4611]: I0929 13:52:54.260243 4611 generic.go:334] "Generic (PLEG): container finished" podID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerID="997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc" exitCode=0
Sep 29 13:52:54 crc kubenswrapper[4611]: I0929 13:52:54.260329 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgrcl" event={"ID":"eae9ac17-9e00-423e-9f93-1a6fe0e61c58","Type":"ContainerDied","Data":"997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc"}
Sep 29 13:52:55 crc kubenswrapper[4611]: I0929 13:52:55.270864 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgrcl" event={"ID":"eae9ac17-9e00-423e-9f93-1a6fe0e61c58","Type":"ContainerStarted","Data":"ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3"}
Sep 29 13:52:55 crc kubenswrapper[4611]: I0929 13:52:55.327425 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wgrcl" podStartSLOduration=3.782136308 podStartE2EDuration="6.327402103s" podCreationTimestamp="2025-09-29 13:52:49 +0000 UTC" firstStartedPulling="2025-09-29 13:52:52.230426943 +0000 UTC m=+4359.121946549" lastFinishedPulling="2025-09-29 13:52:54.775692748 +0000 UTC m=+4361.667212344" observedRunningTime="2025-09-29 13:52:55.32106872 +0000 UTC m=+4362.212588326" watchObservedRunningTime="2025-09-29 13:52:55.327402103 +0000 UTC m=+4362.218921709"
Sep 29 13:53:00 crc kubenswrapper[4611]: I0929 13:53:00.318458 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:53:00 crc kubenswrapper[4611]: I0929 13:53:00.320241 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:53:00 crc kubenswrapper[4611]: I0929 13:53:00.483666 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:53:01 crc kubenswrapper[4611]: I0929 13:53:01.377343 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:53:01 crc kubenswrapper[4611]: I0929 13:53:01.440785 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgrcl"]
Sep 29 13:53:03 crc kubenswrapper[4611]: I0929 13:53:03.345737 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wgrcl" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="registry-server" containerID="cri-o://ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3" gracePeriod=2
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.069244 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.173150 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-utilities\") pod \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") "
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.173306 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m65nn\" (UniqueName: \"kubernetes.io/projected/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-kube-api-access-m65nn\") pod \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") "
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.173348 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-catalog-content\") pod \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\" (UID: \"eae9ac17-9e00-423e-9f93-1a6fe0e61c58\") "
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.174881 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-utilities" (OuterVolumeSpecName: "utilities") pod "eae9ac17-9e00-423e-9f93-1a6fe0e61c58" (UID: "eae9ac17-9e00-423e-9f93-1a6fe0e61c58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.182977 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-kube-api-access-m65nn" (OuterVolumeSpecName: "kube-api-access-m65nn") pod "eae9ac17-9e00-423e-9f93-1a6fe0e61c58" (UID: "eae9ac17-9e00-423e-9f93-1a6fe0e61c58"). InnerVolumeSpecName "kube-api-access-m65nn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.187023 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eae9ac17-9e00-423e-9f93-1a6fe0e61c58" (UID: "eae9ac17-9e00-423e-9f93-1a6fe0e61c58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.275995 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.276031 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.276042 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m65nn\" (UniqueName: \"kubernetes.io/projected/eae9ac17-9e00-423e-9f93-1a6fe0e61c58-kube-api-access-m65nn\") on node \"crc\" DevicePath \"\""
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.356838 4611 generic.go:334] "Generic (PLEG): container finished" podID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerID="ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3" exitCode=0
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.356889 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgrcl" event={"ID":"eae9ac17-9e00-423e-9f93-1a6fe0e61c58","Type":"ContainerDied","Data":"ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3"}
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.356922 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgrcl" event={"ID":"eae9ac17-9e00-423e-9f93-1a6fe0e61c58","Type":"ContainerDied","Data":"8e3e4f6d5eaab646a8cb1551f0d826c59295cffdd37c21f84599a8c766cd87c4"}
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.356919 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgrcl"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.356944 4611 scope.go:117] "RemoveContainer" containerID="ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.390619 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgrcl"]
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.392245 4611 scope.go:117] "RemoveContainer" containerID="997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.411337 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgrcl"]
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.423057 4611 scope.go:117] "RemoveContainer" containerID="025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.472388 4611 scope.go:117] "RemoveContainer" containerID="ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3"
Sep 29 13:53:04 crc kubenswrapper[4611]: E0929 13:53:04.495767 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3\": container with ID starting with ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3 not found: ID does not exist" containerID="ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.495811 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3"} err="failed to get container status \"ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3\": rpc error: code = NotFound desc = could not find container \"ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3\": container with ID starting with ef37ed6afc5987947c0cd24731130e743ecab496c92c3846c55d9453716910d3 not found: ID does not exist"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.495838 4611 scope.go:117] "RemoveContainer" containerID="997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc"
Sep 29 13:53:04 crc kubenswrapper[4611]: E0929 13:53:04.496757 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc\": container with ID starting with 997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc not found: ID does not exist" containerID="997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.496827 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc"} err="failed to get container status \"997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc\": rpc error: code = NotFound desc = could not find container \"997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc\": container with ID starting with 997daf4e9366961cdd09b18e5eda4ff129147385b972be1147659e6ea88f11fc not found: ID does not exist"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.496862 4611 scope.go:117] "RemoveContainer" containerID="025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb"
Sep 29 13:53:04 crc kubenswrapper[4611]: E0929 13:53:04.497238 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb\": container with ID starting with 025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb not found: ID does not exist" containerID="025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.497269 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb"} err="failed to get container status \"025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb\": rpc error: code = NotFound desc = could not find container \"025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb\": container with ID starting with 025f5843e3aa009fa97d1adaefb5bf3e706fdb4b878442d7f40c6b7dd76dc5fb not found: ID does not exist"
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.629087 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:53:04 crc kubenswrapper[4611]: I0929 13:53:04.629346 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:53:05 crc kubenswrapper[4611]: I0929 13:53:05.785833 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" path="/var/lib/kubelet/pods/eae9ac17-9e00-423e-9f93-1a6fe0e61c58/volumes"
Sep 29 13:53:34 crc kubenswrapper[4611]: I0929 13:53:34.628256 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:53:34 crc kubenswrapper[4611]: I0929 13:53:34.629573 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.628869 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.629567 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.629652 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq"
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.630503 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.630564 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e" gracePeriod=600
Sep 29 13:54:04 crc kubenswrapper[4611]: E0929 13:54:04.804653 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.976322 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e" exitCode=0
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.976408 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"}
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.976602 4611 scope.go:117] "RemoveContainer" containerID="a2703c62d50efe3d3252fa87ac5888b52cd4ef144cd5d770824b4fa01fe90d02"
Sep 29 13:54:04 crc kubenswrapper[4611]: I0929 13:54:04.979618 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:54:04 crc kubenswrapper[4611]: E0929 13:54:04.980197 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:54:16 crc kubenswrapper[4611]: I0929 13:54:16.736497 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:54:16 crc kubenswrapper[4611]: E0929 13:54:16.737195 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:54:28 crc kubenswrapper[4611]: I0929 13:54:28.736881 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:54:28 crc kubenswrapper[4611]: E0929 13:54:28.737767 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:54:39 crc kubenswrapper[4611]: I0929 13:54:39.737041 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:54:39 crc kubenswrapper[4611]: E0929 13:54:39.739066 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:54:51 crc kubenswrapper[4611]: I0929 13:54:51.736949 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:54:51 crc kubenswrapper[4611]: E0929 13:54:51.738794 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:55:03 crc kubenswrapper[4611]: I0929 13:55:03.744873 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:55:03 crc kubenswrapper[4611]: E0929 13:55:03.746353 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:55:18 crc kubenswrapper[4611]: I0929 13:55:18.738665 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:55:18 crc kubenswrapper[4611]: E0929 13:55:18.739505 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:55:29 crc kubenswrapper[4611]: I0929 13:55:29.736833 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:55:29 crc kubenswrapper[4611]: E0929 13:55:29.737755 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:55:41 crc kubenswrapper[4611]: I0929 13:55:41.737790 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:55:41 crc kubenswrapper[4611]: E0929 13:55:41.738600 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:55:56 crc kubenswrapper[4611]: I0929 13:55:56.737164 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:55:56 crc kubenswrapper[4611]: E0929 13:55:56.737921 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:56:11 crc kubenswrapper[4611]: I0929 13:56:11.736511 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:56:11 crc kubenswrapper[4611]: E0929 13:56:11.737312 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:56:24 crc kubenswrapper[4611]: I0929 13:56:24.737070 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:56:24 crc kubenswrapper[4611]: E0929 13:56:24.737896 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:56:37 crc kubenswrapper[4611]: I0929 13:56:37.737186 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:56:37 crc kubenswrapper[4611]: E0929 13:56:37.738562 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:56:51 crc kubenswrapper[4611]: I0929 13:56:51.742821 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:56:51 crc kubenswrapper[4611]: E0929 13:56:51.744780 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:57:05 crc kubenswrapper[4611]: I0929 13:57:05.736437 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:57:05 crc kubenswrapper[4611]: E0929 13:57:05.737173 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:57:16 crc kubenswrapper[4611]: I0929 13:57:16.736932 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:57:16 crc kubenswrapper[4611]: E0929 13:57:16.737661 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:57:29 crc kubenswrapper[4611]: I0929 13:57:29.736614 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:57:29 crc kubenswrapper[4611]: E0929 13:57:29.737464 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:57:43 crc kubenswrapper[4611]: I0929 13:57:43.751864 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:57:43 crc kubenswrapper[4611]: E0929 13:57:43.753447 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:57:57 crc kubenswrapper[4611]: I0929 13:57:57.736345 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:57:57 crc kubenswrapper[4611]: E0929 13:57:57.737066 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:58:09 crc kubenswrapper[4611]: I0929 13:58:09.736756 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:58:09 crc kubenswrapper[4611]: E0929 13:58:09.737747 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.281906 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f4wn5"]
Sep 29 13:58:14 crc kubenswrapper[4611]: E0929 13:58:14.282770 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="extract-utilities"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.282787 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="extract-utilities"
Sep 29 13:58:14 crc kubenswrapper[4611]: E0929 13:58:14.282820 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="registry-server"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.282829 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="registry-server"
Sep 29 13:58:14 crc kubenswrapper[4611]: E0929 13:58:14.282848 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="extract-content"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.282856 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="extract-content"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.283104 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="eae9ac17-9e00-423e-9f93-1a6fe0e61c58" containerName="registry-server"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.284885 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.297136 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f4wn5"]
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.452905 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-catalog-content\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.453181 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-utilities\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.453318 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsnxg\" (UniqueName: \"kubernetes.io/projected/10a54363-63cd-4c25-ad0f-b8d0346426ea-kube-api-access-gsnxg\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.554805 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-utilities\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.554867 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsnxg\" (UniqueName: \"kubernetes.io/projected/10a54363-63cd-4c25-ad0f-b8d0346426ea-kube-api-access-gsnxg\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.554957 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-catalog-content\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.555434 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-catalog-content\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.555476 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-utilities\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.578326 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsnxg\" (UniqueName: \"kubernetes.io/projected/10a54363-63cd-4c25-ad0f-b8d0346426ea-kube-api-access-gsnxg\") pod \"community-operators-f4wn5\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") " pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:14 crc kubenswrapper[4611]: I0929 13:58:14.623220 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:15 crc kubenswrapper[4611]: I0929 13:58:15.145762 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f4wn5"]
Sep 29 13:58:15 crc kubenswrapper[4611]: I0929 13:58:15.542840 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerStarted","Data":"2e89fe5d81783f2c4e7f01a707a082a5c76fdd64a92bcb2905b95410cb8f021e"}
Sep 29 13:58:16 crc kubenswrapper[4611]: I0929 13:58:16.552344 4611 generic.go:334] "Generic (PLEG): container finished" podID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerID="2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f" exitCode=0
Sep 29 13:58:16 crc kubenswrapper[4611]: I0929 13:58:16.552392 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerDied","Data":"2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f"}
Sep 29 13:58:16 crc kubenswrapper[4611]: I0929 13:58:16.554651 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 13:58:17 crc kubenswrapper[4611]: I0929 13:58:17.565155 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerStarted","Data":"e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2"}
Sep 29 13:58:20 crc kubenswrapper[4611]: I0929 13:58:20.600249 4611 generic.go:334] "Generic (PLEG): container finished" podID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerID="e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2" exitCode=0
Sep 29 13:58:20 crc kubenswrapper[4611]: I0929 13:58:20.600447 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerDied","Data":"e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2"}
Sep 29 13:58:21 crc kubenswrapper[4611]: I0929 13:58:21.618468 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerStarted","Data":"7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b"}
Sep 29 13:58:24 crc kubenswrapper[4611]: I0929 13:58:24.625064 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:24 crc kubenswrapper[4611]: I0929 13:58:24.628844 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:24 crc kubenswrapper[4611]: I0929 13:58:24.736138 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:58:24 crc kubenswrapper[4611]: E0929 13:58:24.736442 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:58:25 crc kubenswrapper[4611]: I0929 13:58:25.783448 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-f4wn5" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="registry-server" probeResult="failure" output=<
Sep 29 13:58:25 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s
Sep 29 13:58:25 crc kubenswrapper[4611]: >
Sep 29 13:58:35 crc kubenswrapper[4611]: I0929 13:58:35.684555 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-f4wn5" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="registry-server" probeResult="failure" output=<
Sep 29 13:58:35 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s
Sep 29 13:58:35 crc kubenswrapper[4611]: >
Sep 29 13:58:39 crc kubenswrapper[4611]: I0929 13:58:39.738050 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:58:39 crc kubenswrapper[4611]: E0929 13:58:39.738969 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:58:44 crc kubenswrapper[4611]: I0929 13:58:44.680038 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:44 crc kubenswrapper[4611]: I0929 13:58:44.747468 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f4wn5" podStartSLOduration=26.148583279 podStartE2EDuration="30.747446575s" podCreationTimestamp="2025-09-29 13:58:14 +0000 UTC" firstStartedPulling="2025-09-29 13:58:16.554364273 +0000 UTC m=+4683.445883879" lastFinishedPulling="2025-09-29 13:58:21.153227569 +0000 UTC m=+4688.044747175" observedRunningTime="2025-09-29 13:58:21.644475816 +0000 UTC m=+4688.535995432" watchObservedRunningTime="2025-09-29 13:58:44.747446575 +0000 UTC m=+4711.638966181"
Sep 29 13:58:44 crc kubenswrapper[4611]: I0929 13:58:44.759432 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:45 crc kubenswrapper[4611]: I0929 13:58:45.499157 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f4wn5"]
Sep 29 13:58:45 crc kubenswrapper[4611]: I0929 13:58:45.896641 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f4wn5" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="registry-server" containerID="cri-o://7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b" gracePeriod=2
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.716806 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.844316 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-utilities\") pod \"10a54363-63cd-4c25-ad0f-b8d0346426ea\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") "
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.844399 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-catalog-content\") pod \"10a54363-63cd-4c25-ad0f-b8d0346426ea\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") "
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.844496 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsnxg\" (UniqueName: \"kubernetes.io/projected/10a54363-63cd-4c25-ad0f-b8d0346426ea-kube-api-access-gsnxg\") pod \"10a54363-63cd-4c25-ad0f-b8d0346426ea\" (UID: \"10a54363-63cd-4c25-ad0f-b8d0346426ea\") "
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.846176 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-utilities" (OuterVolumeSpecName: "utilities") pod "10a54363-63cd-4c25-ad0f-b8d0346426ea" (UID: "10a54363-63cd-4c25-ad0f-b8d0346426ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.852619 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10a54363-63cd-4c25-ad0f-b8d0346426ea-kube-api-access-gsnxg" (OuterVolumeSpecName: "kube-api-access-gsnxg") pod "10a54363-63cd-4c25-ad0f-b8d0346426ea" (UID: "10a54363-63cd-4c25-ad0f-b8d0346426ea"). InnerVolumeSpecName "kube-api-access-gsnxg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.906192 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10a54363-63cd-4c25-ad0f-b8d0346426ea" (UID: "10a54363-63cd-4c25-ad0f-b8d0346426ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.908950 4611 generic.go:334] "Generic (PLEG): container finished" podID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerID="7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b" exitCode=0
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.908998 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerDied","Data":"7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b"}
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.909022 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f4wn5"
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.909030 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f4wn5" event={"ID":"10a54363-63cd-4c25-ad0f-b8d0346426ea","Type":"ContainerDied","Data":"2e89fe5d81783f2c4e7f01a707a082a5c76fdd64a92bcb2905b95410cb8f021e"}
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.909051 4611 scope.go:117] "RemoveContainer" containerID="7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b"
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.932876 4611 scope.go:117] "RemoveContainer" containerID="e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2"
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.946697 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.946751 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsnxg\" (UniqueName: \"kubernetes.io/projected/10a54363-63cd-4c25-ad0f-b8d0346426ea-kube-api-access-gsnxg\") on node \"crc\" DevicePath \"\""
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.946765 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a54363-63cd-4c25-ad0f-b8d0346426ea-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:58:46 crc kubenswrapper[4611]: I0929 13:58:46.968594 4611 scope.go:117] "RemoveContainer" containerID="2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.010145 4611 scope.go:117] "RemoveContainer" containerID="7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b"
Sep 29 13:58:47 crc kubenswrapper[4611]: E0929 13:58:47.012933 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b\": container with ID starting with 7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b not found: ID does not exist" containerID="7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.012981 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b"} err="failed to get container status \"7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b\": rpc error: code = NotFound desc = could not find container \"7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b\": container with ID starting with 7434a903bce5d5d150b25f53c30f1acb11edcd06d262e4e16b6fd34e7fe0ba2b not found: ID does not exist"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.013011 4611 scope.go:117] "RemoveContainer" containerID="e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2"
Sep 29 13:58:47 crc kubenswrapper[4611]: E0929 13:58:47.014327 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2\": container with ID starting with e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2 not found: ID does not exist" containerID="e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.014389 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2"} err="failed to get container status \"e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2\": rpc error: code = NotFound desc = could not find container \"e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2\": container with ID starting with e9a56f01452fd9d1f8a3886fea2a2fd02af1eb21ca5a4f8a404710ca3d3f45e2 not found: ID does not exist"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.014423 4611 scope.go:117] "RemoveContainer" containerID="2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f"
Sep 29 13:58:47 crc kubenswrapper[4611]: E0929 13:58:47.019150 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f\": container with ID starting with 2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f not found: ID does not exist" containerID="2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.019190 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f"} err="failed to get container status \"2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f\": rpc error: code = NotFound desc = could not find container \"2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f\": container with ID starting with 2897ded1a799d1008a1aab00462fcfb999fa58fcdc0f9d9f21304a93043b5a1f not found: ID does not exist"
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.029853 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f4wn5"]
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.036904 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f4wn5"]
Sep 29 13:58:47 crc kubenswrapper[4611]: I0929 13:58:47.749082 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" path="/var/lib/kubelet/pods/10a54363-63cd-4c25-ad0f-b8d0346426ea/volumes"
Sep 29 13:58:53 crc kubenswrapper[4611]: I0929 13:58:53.749222 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:58:53 crc kubenswrapper[4611]: E0929 13:58:53.749975 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 13:59:07 crc kubenswrapper[4611]: I0929 13:59:07.736816 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e"
Sep 29 13:59:08 crc kubenswrapper[4611]: I0929 13:59:08.113406 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"4923a2c81c48e8c95b663bf0a126e00604c30dc0282e8f7fdb4ee29ec45d4c17"}
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.151854 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"]
Sep 29 14:00:00 crc kubenswrapper[4611]: E0929 14:00:00.152818 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="registry-server"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.152834 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="registry-server"
Sep 29 14:00:00 crc kubenswrapper[4611]: E0929 14:00:00.152866 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="extract-utilities"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.152873 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="extract-utilities"
Sep 29 14:00:00 crc kubenswrapper[4611]: E0929 14:00:00.152892 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="extract-content"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.152900 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="extract-content"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.153124 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="10a54363-63cd-4c25-ad0f-b8d0346426ea" containerName="registry-server"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.153961 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.160193 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.160733 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.173093 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"]
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.326911 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjsv4\" (UniqueName: \"kubernetes.io/projected/3840a5d7-6450-4443-9b6a-a39e2e63e754-kube-api-access-zjsv4\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.327068 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3840a5d7-6450-4443-9b6a-a39e2e63e754-config-volume\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.327100 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3840a5d7-6450-4443-9b6a-a39e2e63e754-secret-volume\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.429161 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjsv4\" (UniqueName: \"kubernetes.io/projected/3840a5d7-6450-4443-9b6a-a39e2e63e754-kube-api-access-zjsv4\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.429449 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3840a5d7-6450-4443-9b6a-a39e2e63e754-config-volume\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.429545 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3840a5d7-6450-4443-9b6a-a39e2e63e754-secret-volume\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.430981 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3840a5d7-6450-4443-9b6a-a39e2e63e754-config-volume\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.443363 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3840a5d7-6450-4443-9b6a-a39e2e63e754-secret-volume\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.454008 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjsv4\" (UniqueName: \"kubernetes.io/projected/3840a5d7-6450-4443-9b6a-a39e2e63e754-kube-api-access-zjsv4\") pod \"collect-profiles-29319240-gxcpt\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:00 crc kubenswrapper[4611]: I0929 14:00:00.527207 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:01 crc kubenswrapper[4611]: I0929 14:00:01.010880 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"]
Sep 29 14:00:01 crc kubenswrapper[4611]: I0929 14:00:01.699011 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt" event={"ID":"3840a5d7-6450-4443-9b6a-a39e2e63e754","Type":"ContainerStarted","Data":"46aeb38a7a82fdd84fdc1f8573bd05d4796224b26a11ed6808f5ca86c1355b4e"}
Sep 29 14:00:01 crc kubenswrapper[4611]: I0929 14:00:01.699385 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt" event={"ID":"3840a5d7-6450-4443-9b6a-a39e2e63e754","Type":"ContainerStarted","Data":"22381dca2f2f00c074c64bb6b9c2809c487bc19f02e88f2b5f686d65e7d4c2a1"}
Sep 29 14:00:01 crc kubenswrapper[4611]: I0929 14:00:01.716241 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt" podStartSLOduration=1.716221073 podStartE2EDuration="1.716221073s" podCreationTimestamp="2025-09-29 14:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:00:01.713107683 +0000 UTC m=+4788.604627289" watchObservedRunningTime="2025-09-29 14:00:01.716221073 +0000 UTC m=+4788.607740669"
Sep 29 14:00:02 crc kubenswrapper[4611]: I0929 14:00:02.724092 4611 generic.go:334] "Generic (PLEG): container finished" podID="3840a5d7-6450-4443-9b6a-a39e2e63e754" containerID="46aeb38a7a82fdd84fdc1f8573bd05d4796224b26a11ed6808f5ca86c1355b4e" exitCode=0
Sep 29 14:00:02 crc kubenswrapper[4611]: I0929 14:00:02.724402 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt" event={"ID":"3840a5d7-6450-4443-9b6a-a39e2e63e754","Type":"ContainerDied","Data":"46aeb38a7a82fdd84fdc1f8573bd05d4796224b26a11ed6808f5ca86c1355b4e"}
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.154392 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.196665 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3840a5d7-6450-4443-9b6a-a39e2e63e754-config-volume\") pod \"3840a5d7-6450-4443-9b6a-a39e2e63e754\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") "
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.196872 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3840a5d7-6450-4443-9b6a-a39e2e63e754-secret-volume\") pod \"3840a5d7-6450-4443-9b6a-a39e2e63e754\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") "
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.196985 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjsv4\" (UniqueName: \"kubernetes.io/projected/3840a5d7-6450-4443-9b6a-a39e2e63e754-kube-api-access-zjsv4\") pod \"3840a5d7-6450-4443-9b6a-a39e2e63e754\" (UID: \"3840a5d7-6450-4443-9b6a-a39e2e63e754\") "
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.197548 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3840a5d7-6450-4443-9b6a-a39e2e63e754-config-volume" (OuterVolumeSpecName: "config-volume") pod "3840a5d7-6450-4443-9b6a-a39e2e63e754" (UID: "3840a5d7-6450-4443-9b6a-a39e2e63e754"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.198243 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3840a5d7-6450-4443-9b6a-a39e2e63e754-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.203252 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3840a5d7-6450-4443-9b6a-a39e2e63e754-kube-api-access-zjsv4" (OuterVolumeSpecName: "kube-api-access-zjsv4") pod "3840a5d7-6450-4443-9b6a-a39e2e63e754" (UID: "3840a5d7-6450-4443-9b6a-a39e2e63e754"). InnerVolumeSpecName "kube-api-access-zjsv4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.203925 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3840a5d7-6450-4443-9b6a-a39e2e63e754-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3840a5d7-6450-4443-9b6a-a39e2e63e754" (UID: "3840a5d7-6450-4443-9b6a-a39e2e63e754"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.301321 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3840a5d7-6450-4443-9b6a-a39e2e63e754-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.301359 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjsv4\" (UniqueName: \"kubernetes.io/projected/3840a5d7-6450-4443-9b6a-a39e2e63e754-kube-api-access-zjsv4\") on node \"crc\" DevicePath \"\""
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.744243 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt" event={"ID":"3840a5d7-6450-4443-9b6a-a39e2e63e754","Type":"ContainerDied","Data":"22381dca2f2f00c074c64bb6b9c2809c487bc19f02e88f2b5f686d65e7d4c2a1"}
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.744606 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22381dca2f2f00c074c64bb6b9c2809c487bc19f02e88f2b5f686d65e7d4c2a1"
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.744684 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-gxcpt"
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.807813 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz"]
Sep 29 14:00:04 crc kubenswrapper[4611]: I0929 14:00:04.817419 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319195-kwcgz"]
Sep 29 14:00:05 crc kubenswrapper[4611]: I0929 14:00:05.767448 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31987558-1398-4162-91c5-67884f605277" path="/var/lib/kubelet/pods/31987558-1398-4162-91c5-67884f605277/volumes"
Sep 29 14:00:38 crc kubenswrapper[4611]: I0929 14:00:38.513251 4611 scope.go:117] "RemoveContainer" containerID="5d853325eb8fcc2ce2294563a1851bc5d83eceda582b0c667b627ba37f74e121"
Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.166360 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319241-dcp5m"]
Sep 29 14:01:00 crc kubenswrapper[4611]: E0929 14:01:00.168467 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3840a5d7-6450-4443-9b6a-a39e2e63e754" containerName="collect-profiles"
Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.168582 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3840a5d7-6450-4443-9b6a-a39e2e63e754" containerName="collect-profiles"
Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.168938 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="3840a5d7-6450-4443-9b6a-a39e2e63e754" containerName="collect-profiles"
Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.169874 4611 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.178279 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319241-dcp5m"] Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.205574 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6hzb\" (UniqueName: \"kubernetes.io/projected/be7463cf-87c5-4053-94a8-162e3c310e92-kube-api-access-x6hzb\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.205738 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-fernet-keys\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.205971 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-combined-ca-bundle\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.206025 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-config-data\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.308282 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-combined-ca-bundle\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.308657 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-config-data\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.308824 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6hzb\" (UniqueName: \"kubernetes.io/projected/be7463cf-87c5-4053-94a8-162e3c310e92-kube-api-access-x6hzb\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.309005 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-fernet-keys\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.325431 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-fernet-keys\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.325454 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-combined-ca-bundle\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.326264 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-config-data\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.328612 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6hzb\" (UniqueName: \"kubernetes.io/projected/be7463cf-87c5-4053-94a8-162e3c310e92-kube-api-access-x6hzb\") pod \"keystone-cron-29319241-dcp5m\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:00 crc kubenswrapper[4611]: I0929 14:01:00.542504 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:01 crc kubenswrapper[4611]: I0929 14:01:01.092310 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319241-dcp5m"] Sep 29 14:01:01 crc kubenswrapper[4611]: I0929 14:01:01.347371 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319241-dcp5m" event={"ID":"be7463cf-87c5-4053-94a8-162e3c310e92","Type":"ContainerStarted","Data":"5848494c6d298a5cfacab1f335fd15a2f1e6df278e361f89c5f9125fa08cb246"} Sep 29 14:01:02 crc kubenswrapper[4611]: I0929 14:01:02.360912 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319241-dcp5m" event={"ID":"be7463cf-87c5-4053-94a8-162e3c310e92","Type":"ContainerStarted","Data":"0944debb4b3b7c920f774ae2e1baf3430e82b089db941dd5954ffa04c1cb40fd"} Sep 29 14:01:02 crc kubenswrapper[4611]: I0929 14:01:02.395401 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319241-dcp5m" podStartSLOduration=2.395375067 podStartE2EDuration="2.395375067s" podCreationTimestamp="2025-09-29 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:01:02.38200903 +0000 UTC m=+4849.273528656" watchObservedRunningTime="2025-09-29 14:01:02.395375067 +0000 UTC m=+4849.286894723" Sep 29 14:01:11 crc kubenswrapper[4611]: I0929 14:01:11.457153 4611 generic.go:334] "Generic (PLEG): container finished" podID="be7463cf-87c5-4053-94a8-162e3c310e92" containerID="0944debb4b3b7c920f774ae2e1baf3430e82b089db941dd5954ffa04c1cb40fd" exitCode=0 Sep 29 14:01:11 crc kubenswrapper[4611]: I0929 14:01:11.457335 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319241-dcp5m" event={"ID":"be7463cf-87c5-4053-94a8-162e3c310e92","Type":"ContainerDied","Data":"0944debb4b3b7c920f774ae2e1baf3430e82b089db941dd5954ffa04c1cb40fd"} Sep 29 14:01:12 crc kubenswrapper[4611]: 
I0929 14:01:12.985448 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.102196 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-fernet-keys\") pod \"be7463cf-87c5-4053-94a8-162e3c310e92\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.102371 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-combined-ca-bundle\") pod \"be7463cf-87c5-4053-94a8-162e3c310e92\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.102540 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6hzb\" (UniqueName: \"kubernetes.io/projected/be7463cf-87c5-4053-94a8-162e3c310e92-kube-api-access-x6hzb\") pod \"be7463cf-87c5-4053-94a8-162e3c310e92\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.102583 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-config-data\") pod \"be7463cf-87c5-4053-94a8-162e3c310e92\" (UID: \"be7463cf-87c5-4053-94a8-162e3c310e92\") " Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.108074 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "be7463cf-87c5-4053-94a8-162e3c310e92" (UID: "be7463cf-87c5-4053-94a8-162e3c310e92"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.109852 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be7463cf-87c5-4053-94a8-162e3c310e92-kube-api-access-x6hzb" (OuterVolumeSpecName: "kube-api-access-x6hzb") pod "be7463cf-87c5-4053-94a8-162e3c310e92" (UID: "be7463cf-87c5-4053-94a8-162e3c310e92"). InnerVolumeSpecName "kube-api-access-x6hzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.145149 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be7463cf-87c5-4053-94a8-162e3c310e92" (UID: "be7463cf-87c5-4053-94a8-162e3c310e92"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.160401 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-config-data" (OuterVolumeSpecName: "config-data") pod "be7463cf-87c5-4053-94a8-162e3c310e92" (UID: "be7463cf-87c5-4053-94a8-162e3c310e92"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.204482 4611 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.204518 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6hzb\" (UniqueName: \"kubernetes.io/projected/be7463cf-87c5-4053-94a8-162e3c310e92-kube-api-access-x6hzb\") on node \"crc\" DevicePath \"\"" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.204531 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.204541 4611 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/be7463cf-87c5-4053-94a8-162e3c310e92-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.476084 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319241-dcp5m" event={"ID":"be7463cf-87c5-4053-94a8-162e3c310e92","Type":"ContainerDied","Data":"5848494c6d298a5cfacab1f335fd15a2f1e6df278e361f89c5f9125fa08cb246"} Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.476121 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5848494c6d298a5cfacab1f335fd15a2f1e6df278e361f89c5f9125fa08cb246" Sep 29 14:01:13 crc kubenswrapper[4611]: I0929 14:01:13.476128 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319241-dcp5m" Sep 29 14:01:34 crc kubenswrapper[4611]: I0929 14:01:34.628793 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:01:34 crc kubenswrapper[4611]: I0929 14:01:34.629262 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:02:04 crc kubenswrapper[4611]: I0929 14:02:04.628462 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:02:04 crc kubenswrapper[4611]: I0929 14:02:04.629030 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:02:08 crc kubenswrapper[4611]: I0929 14:02:08.971708 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-npx8p"] Sep 29 14:02:08 crc kubenswrapper[4611]: 
E0929 14:02:08.972702 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be7463cf-87c5-4053-94a8-162e3c310e92" containerName="keystone-cron" Sep 29 14:02:08 crc kubenswrapper[4611]: I0929 14:02:08.972718 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="be7463cf-87c5-4053-94a8-162e3c310e92" containerName="keystone-cron" Sep 29 14:02:08 crc kubenswrapper[4611]: I0929 14:02:08.972955 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="be7463cf-87c5-4053-94a8-162e3c310e92" containerName="keystone-cron" Sep 29 14:02:08 crc kubenswrapper[4611]: I0929 14:02:08.974425 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:08 crc kubenswrapper[4611]: I0929 14:02:08.995213 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-npx8p"] Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.119590 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983bac02-54cb-47f1-bc9d-4ff404002926-catalog-content\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.119823 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983bac02-54cb-47f1-bc9d-4ff404002926-utilities\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.120033 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2wgq\" (UniqueName: \"kubernetes.io/projected/983bac02-54cb-47f1-bc9d-4ff404002926-kube-api-access-x2wgq\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.222068 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983bac02-54cb-47f1-bc9d-4ff404002926-utilities\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.222146 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2wgq\" (UniqueName: \"kubernetes.io/projected/983bac02-54cb-47f1-bc9d-4ff404002926-kube-api-access-x2wgq\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.222219 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983bac02-54cb-47f1-bc9d-4ff404002926-catalog-content\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.222793 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/983bac02-54cb-47f1-bc9d-4ff404002926-catalog-content\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.222802 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983bac02-54cb-47f1-bc9d-4ff404002926-utilities\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.254936 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2wgq\" (UniqueName: \"kubernetes.io/projected/983bac02-54cb-47f1-bc9d-4ff404002926-kube-api-access-x2wgq\") pod \"certified-operators-npx8p\" (UID: \"983bac02-54cb-47f1-bc9d-4ff404002926\") " pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.299552 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:09 crc kubenswrapper[4611]: I0929 14:02:09.910489 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-npx8p"] Sep 29 14:02:10 crc kubenswrapper[4611]: I0929 14:02:10.038551 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npx8p" event={"ID":"983bac02-54cb-47f1-bc9d-4ff404002926","Type":"ContainerStarted","Data":"e34cb1e5cdff5305b3c81637cc4902b6a2be7e341ec5007ac8872b8c49fe0c2f"} Sep 29 14:02:11 crc kubenswrapper[4611]: I0929 14:02:11.049304 4611 generic.go:334] "Generic (PLEG): container finished" podID="983bac02-54cb-47f1-bc9d-4ff404002926" containerID="7120eb4b88e1a31de2179ce51519ac7d86861e233dce8c6ca8784bccf389c239" exitCode=0 Sep 29 14:02:11 crc kubenswrapper[4611]: I0929 14:02:11.049396 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npx8p" event={"ID":"983bac02-54cb-47f1-bc9d-4ff404002926","Type":"ContainerDied","Data":"7120eb4b88e1a31de2179ce51519ac7d86861e233dce8c6ca8784bccf389c239"} Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.762018 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rl6hq"] Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.764404 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.808249 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rl6hq"] Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.891879 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ldjw\" (UniqueName: \"kubernetes.io/projected/4255642e-5fb7-4593-b838-1078b9be084b-kube-api-access-7ldjw\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.891993 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-utilities\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.892143 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-catalog-content\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.993769 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ldjw\" (UniqueName: \"kubernetes.io/projected/4255642e-5fb7-4593-b838-1078b9be084b-kube-api-access-7ldjw\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.993859 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-utilities\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:15 crc kubenswrapper[4611]: I0929 14:02:15.993932 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-catalog-content\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:16 crc kubenswrapper[4611]: I0929 14:02:16.001259 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-utilities\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:16 crc kubenswrapper[4611]: I0929 14:02:16.004518 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-catalog-content\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:16 crc kubenswrapper[4611]: I0929 14:02:16.226809 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7ldjw\" (UniqueName: \"kubernetes.io/projected/4255642e-5fb7-4593-b838-1078b9be084b-kube-api-access-7ldjw\") pod \"redhat-operators-rl6hq\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:16 crc kubenswrapper[4611]: I0929 14:02:16.385129 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:19 crc kubenswrapper[4611]: I0929 14:02:19.560514 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rl6hq"] Sep 29 14:02:20 crc kubenswrapper[4611]: I0929 14:02:20.161262 4611 generic.go:334] "Generic (PLEG): container finished" podID="4255642e-5fb7-4593-b838-1078b9be084b" containerID="e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a" exitCode=0 Sep 29 14:02:20 crc kubenswrapper[4611]: I0929 14:02:20.161351 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerDied","Data":"e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a"} Sep 29 14:02:20 crc kubenswrapper[4611]: I0929 14:02:20.161704 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerStarted","Data":"899d856f4c82da5bda9f7f153565760c4f7cbe98a77644cea592c822990e3c19"} Sep 29 14:02:20 crc kubenswrapper[4611]: I0929 14:02:20.166417 4611 generic.go:334] "Generic (PLEG): container finished" podID="983bac02-54cb-47f1-bc9d-4ff404002926" containerID="e686c436ca9494e1700001e25b10c9246c9e869d796bf4de1d9a9d49e8901437" exitCode=0 Sep 29 14:02:20 crc kubenswrapper[4611]: I0929 14:02:20.166456 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npx8p" event={"ID":"983bac02-54cb-47f1-bc9d-4ff404002926","Type":"ContainerDied","Data":"e686c436ca9494e1700001e25b10c9246c9e869d796bf4de1d9a9d49e8901437"} Sep 29 14:02:22 crc kubenswrapper[4611]: I0929 14:02:22.194294 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-npx8p" event={"ID":"983bac02-54cb-47f1-bc9d-4ff404002926","Type":"ContainerStarted","Data":"cb2df85f654dd92b77ab74547027c1ce93ceaa80d023110e538c4c838781066c"} Sep 29 14:02:22 crc kubenswrapper[4611]: I0929 14:02:22.202175 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerStarted","Data":"b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d"} Sep 29 14:02:22 crc kubenswrapper[4611]: I0929 14:02:22.226699 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-npx8p" podStartSLOduration=3.62585156 podStartE2EDuration="14.226677898s" podCreationTimestamp="2025-09-29 14:02:08 +0000 UTC" firstStartedPulling="2025-09-29 14:02:11.051931403 +0000 UTC m=+4917.943451019" lastFinishedPulling="2025-09-29 14:02:21.652757751 +0000 UTC m=+4928.544277357" observedRunningTime="2025-09-29 14:02:22.214844785 +0000 UTC m=+4929.106364391" watchObservedRunningTime="2025-09-29 14:02:22.226677898 +0000 UTC m=+4929.118197514" Sep 29 14:02:27 crc kubenswrapper[4611]: I0929 14:02:27.256451 4611 generic.go:334] "Generic (PLEG): container finished" podID="4255642e-5fb7-4593-b838-1078b9be084b" 
containerID="b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d" exitCode=0 Sep 29 14:02:27 crc kubenswrapper[4611]: I0929 14:02:27.256532 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerDied","Data":"b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d"} Sep 29 14:02:28 crc kubenswrapper[4611]: I0929 14:02:28.269127 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerStarted","Data":"54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e"} Sep 29 14:02:28 crc kubenswrapper[4611]: I0929 14:02:28.306467 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rl6hq" podStartSLOduration=5.723329372 podStartE2EDuration="13.306448981s" podCreationTimestamp="2025-09-29 14:02:15 +0000 UTC" firstStartedPulling="2025-09-29 14:02:20.192738658 +0000 UTC m=+4927.084258264" lastFinishedPulling="2025-09-29 14:02:27.775858267 +0000 UTC m=+4934.667377873" observedRunningTime="2025-09-29 14:02:28.297133412 +0000 UTC m=+4935.188653038" watchObservedRunningTime="2025-09-29 14:02:28.306448981 +0000 UTC m=+4935.197968587" Sep 29 14:02:29 crc kubenswrapper[4611]: I0929 14:02:29.300036 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:29 crc kubenswrapper[4611]: I0929 14:02:29.301677 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:30 crc kubenswrapper[4611]: I0929 14:02:30.367176 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-npx8p" podUID="983bac02-54cb-47f1-bc9d-4ff404002926" containerName="registry-server" probeResult="failure" output=< Sep 29 14:02:30 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:02:30 crc kubenswrapper[4611]: > Sep 29 14:02:34 crc kubenswrapper[4611]: I0929 14:02:34.629348 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:02:34 crc kubenswrapper[4611]: I0929 14:02:34.629854 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:02:34 crc kubenswrapper[4611]: I0929 14:02:34.629912 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:02:34 crc kubenswrapper[4611]: I0929 14:02:34.630659 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4923a2c81c48e8c95b663bf0a126e00604c30dc0282e8f7fdb4ee29ec45d4c17"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 
14:02:34 crc kubenswrapper[4611]: I0929 14:02:34.630713 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://4923a2c81c48e8c95b663bf0a126e00604c30dc0282e8f7fdb4ee29ec45d4c17" gracePeriod=600 Sep 29 14:02:35 crc kubenswrapper[4611]: I0929 14:02:35.331208 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="4923a2c81c48e8c95b663bf0a126e00604c30dc0282e8f7fdb4ee29ec45d4c17" exitCode=0 Sep 29 14:02:35 crc kubenswrapper[4611]: I0929 14:02:35.331300 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"4923a2c81c48e8c95b663bf0a126e00604c30dc0282e8f7fdb4ee29ec45d4c17"} Sep 29 14:02:35 crc kubenswrapper[4611]: I0929 14:02:35.331531 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"} Sep 29 14:02:35 crc kubenswrapper[4611]: I0929 14:02:35.331554 4611 scope.go:117] "RemoveContainer" containerID="60fb87007b97aa07e5341abc93f56e64108f0799ccf00f276fdf2cebdcbad04e" Sep 29 14:02:36 crc kubenswrapper[4611]: I0929 14:02:36.386166 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:36 crc kubenswrapper[4611]: I0929 14:02:36.386476 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:37 crc kubenswrapper[4611]: I0929 14:02:37.448571 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rl6hq" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="registry-server" probeResult="failure" output=< Sep 29 14:02:37 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:02:37 crc kubenswrapper[4611]: > Sep 29 14:02:39 crc kubenswrapper[4611]: I0929 14:02:39.895169 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:39 crc kubenswrapper[4611]: I0929 14:02:39.954082 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-npx8p" Sep 29 14:02:40 crc kubenswrapper[4611]: I0929 14:02:40.054515 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-npx8p"] Sep 29 14:02:40 crc kubenswrapper[4611]: I0929 14:02:40.175070 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fghqn"] Sep 29 14:02:40 crc kubenswrapper[4611]: I0929 14:02:40.175314 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fghqn" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="registry-server" containerID="cri-o://51ab4924306d5123b3e833fb4afa6147b266cc9926266f4bbd239122ff02ac26" gracePeriod=2 Sep 29 14:02:40 crc kubenswrapper[4611]: I0929 14:02:40.383280 4611 generic.go:334] "Generic (PLEG): container finished" podID="7f715f99-dbbf-4748-92eb-cd643708ff81" 
containerID="51ab4924306d5123b3e833fb4afa6147b266cc9926266f4bbd239122ff02ac26" exitCode=0 Sep 29 14:02:40 crc kubenswrapper[4611]: I0929 14:02:40.385389 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fghqn" event={"ID":"7f715f99-dbbf-4748-92eb-cd643708ff81","Type":"ContainerDied","Data":"51ab4924306d5123b3e833fb4afa6147b266cc9926266f4bbd239122ff02ac26"} Sep 29 14:02:40 crc kubenswrapper[4611]: I0929 14:02:40.952010 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.047071 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96lp6\" (UniqueName: \"kubernetes.io/projected/7f715f99-dbbf-4748-92eb-cd643708ff81-kube-api-access-96lp6\") pod \"7f715f99-dbbf-4748-92eb-cd643708ff81\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.047307 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-catalog-content\") pod \"7f715f99-dbbf-4748-92eb-cd643708ff81\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.047482 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-utilities\") pod \"7f715f99-dbbf-4748-92eb-cd643708ff81\" (UID: \"7f715f99-dbbf-4748-92eb-cd643708ff81\") " Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.051383 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-utilities" (OuterVolumeSpecName: "utilities") pod "7f715f99-dbbf-4748-92eb-cd643708ff81" (UID: "7f715f99-dbbf-4748-92eb-cd643708ff81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.056899 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f715f99-dbbf-4748-92eb-cd643708ff81-kube-api-access-96lp6" (OuterVolumeSpecName: "kube-api-access-96lp6") pod "7f715f99-dbbf-4748-92eb-cd643708ff81" (UID: "7f715f99-dbbf-4748-92eb-cd643708ff81"). InnerVolumeSpecName "kube-api-access-96lp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.131285 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f715f99-dbbf-4748-92eb-cd643708ff81" (UID: "7f715f99-dbbf-4748-92eb-cd643708ff81"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.150598 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.150643 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96lp6\" (UniqueName: \"kubernetes.io/projected/7f715f99-dbbf-4748-92eb-cd643708ff81-kube-api-access-96lp6\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.150654 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f715f99-dbbf-4748-92eb-cd643708ff81-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.396239 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fghqn" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.396433 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fghqn" event={"ID":"7f715f99-dbbf-4748-92eb-cd643708ff81","Type":"ContainerDied","Data":"0de4406842d10d90c2bbe3688c96cea16ce11824a3b0acd97879566b67a74574"} Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.397097 4611 scope.go:117] "RemoveContainer" containerID="51ab4924306d5123b3e833fb4afa6147b266cc9926266f4bbd239122ff02ac26" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.427595 4611 scope.go:117] "RemoveContainer" containerID="1754eb1ac9d87a1211feef8668f275a7cb6b889c432b76748884cc6ba1c12556" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.452946 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fghqn"] Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.458995 4611 scope.go:117] "RemoveContainer" containerID="5a08e73e4f739cd68950613e851ef98739bd3d8feabbb298780e54ca438d440a" Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.470617 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fghqn"] Sep 29 14:02:41 crc kubenswrapper[4611]: I0929 14:02:41.749050 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" path="/var/lib/kubelet/pods/7f715f99-dbbf-4748-92eb-cd643708ff81/volumes" Sep 29 14:02:47 crc kubenswrapper[4611]: I0929 14:02:47.443388 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rl6hq" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="registry-server" probeResult="failure" output=< Sep 29 14:02:47 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:02:47 crc kubenswrapper[4611]: > Sep 29 14:02:56 crc kubenswrapper[4611]: I0929 14:02:56.485333 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:56 crc kubenswrapper[4611]: I0929 14:02:56.541118 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:56 crc kubenswrapper[4611]: I0929 14:02:56.722264 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rl6hq"] Sep 29 14:02:57 crc kubenswrapper[4611]: I0929 14:02:57.607991 4611 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rl6hq" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="registry-server" containerID="cri-o://54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e" gracePeriod=2 Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.120415 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.298736 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-utilities\") pod \"4255642e-5fb7-4593-b838-1078b9be084b\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.298787 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ldjw\" (UniqueName: \"kubernetes.io/projected/4255642e-5fb7-4593-b838-1078b9be084b-kube-api-access-7ldjw\") pod \"4255642e-5fb7-4593-b838-1078b9be084b\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.298898 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-catalog-content\") pod \"4255642e-5fb7-4593-b838-1078b9be084b\" (UID: \"4255642e-5fb7-4593-b838-1078b9be084b\") " Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.300048 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-utilities" (OuterVolumeSpecName: "utilities") pod "4255642e-5fb7-4593-b838-1078b9be084b" (UID: "4255642e-5fb7-4593-b838-1078b9be084b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.309680 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4255642e-5fb7-4593-b838-1078b9be084b-kube-api-access-7ldjw" (OuterVolumeSpecName: "kube-api-access-7ldjw") pod "4255642e-5fb7-4593-b838-1078b9be084b" (UID: "4255642e-5fb7-4593-b838-1078b9be084b"). InnerVolumeSpecName "kube-api-access-7ldjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.375137 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4255642e-5fb7-4593-b838-1078b9be084b" (UID: "4255642e-5fb7-4593-b838-1078b9be084b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.401019 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.401050 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ldjw\" (UniqueName: \"kubernetes.io/projected/4255642e-5fb7-4593-b838-1078b9be084b-kube-api-access-7ldjw\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.401060 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4255642e-5fb7-4593-b838-1078b9be084b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.627054 4611 generic.go:334] "Generic (PLEG): container finished" podID="4255642e-5fb7-4593-b838-1078b9be084b" containerID="54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e" exitCode=0 Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.627102 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerDied","Data":"54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e"} Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.627135 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl6hq" event={"ID":"4255642e-5fb7-4593-b838-1078b9be084b","Type":"ContainerDied","Data":"899d856f4c82da5bda9f7f153565760c4f7cbe98a77644cea592c822990e3c19"} Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.627152 4611 scope.go:117] "RemoveContainer" containerID="54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.627153 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rl6hq" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.665924 4611 scope.go:117] "RemoveContainer" containerID="b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.668702 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rl6hq"] Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.677294 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rl6hq"] Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.696144 4611 scope.go:117] "RemoveContainer" containerID="e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.776279 4611 scope.go:117] "RemoveContainer" containerID="54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e" Sep 29 14:02:58 crc kubenswrapper[4611]: E0929 14:02:58.777153 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e\": container with ID starting with 54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e not found: ID does not exist" containerID="54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.777197 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e"} err="failed to get container status \"54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e\": rpc error: code = NotFound desc = could not find container \"54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e\": container with ID starting with 54b05a56809311124bfd9e6cc5f63bde9df892d06d98e20963db0213e96c849e not found: ID does not exist" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.777229 4611 scope.go:117] "RemoveContainer" containerID="b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d" Sep 29 14:02:58 crc kubenswrapper[4611]: E0929 14:02:58.777531 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d\": container with ID starting with b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d not found: ID does not exist" containerID="b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.777554 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d"} err="failed to get container status \"b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d\": rpc error: code = NotFound desc = could not find container \"b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d\": container with ID starting with b2940d865677ce5222966ae9a5f4b9c4edb388578e6354700cb1bdf2be38e86d not found: ID does not exist" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.777568 4611 scope.go:117] "RemoveContainer" containerID="e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a" Sep 29 14:02:58 crc kubenswrapper[4611]: E0929 14:02:58.778112 4611 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a\": container with ID starting with e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a not found: ID does not exist" containerID="e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a" Sep 29 14:02:58 crc kubenswrapper[4611]: I0929 14:02:58.778233 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a"} err="failed to get container status \"e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a\": rpc error: code = NotFound desc = could not find container \"e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a\": container with ID starting with e8b25132b62474cfa5280b8599faac39a0e92b45e9cc5ac90f29933a5cc3dc9a not found: ID does not exist" Sep 29 14:02:59 crc kubenswrapper[4611]: I0929 14:02:59.747988 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4255642e-5fb7-4593-b838-1078b9be084b" path="/var/lib/kubelet/pods/4255642e-5fb7-4593-b838-1078b9be084b/volumes" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.713930 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-27cxh"] Sep 29 14:03:16 crc kubenswrapper[4611]: E0929 14:03:16.714929 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="extract-utilities" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.714947 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="extract-utilities" Sep 29 14:03:16 crc kubenswrapper[4611]: E0929 14:03:16.714977 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="extract-utilities" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.714985 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="extract-utilities" Sep 29 14:03:16 crc kubenswrapper[4611]: E0929 14:03:16.715005 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="registry-server" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.715014 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="registry-server" Sep 29 14:03:16 crc kubenswrapper[4611]: E0929 14:03:16.715027 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="extract-content" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.715035 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="extract-content" Sep 29 14:03:16 crc kubenswrapper[4611]: E0929 14:03:16.715056 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="registry-server" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.715064 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="registry-server" Sep 29 14:03:16 crc kubenswrapper[4611]: E0929 14:03:16.715088 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="extract-content" Sep 
29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.715096 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="extract-content" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.715422 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="4255642e-5fb7-4593-b838-1078b9be084b" containerName="registry-server" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.715458 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f715f99-dbbf-4748-92eb-cd643708ff81" containerName="registry-server" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.718082 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.782219 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-27cxh"] Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.876890 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-utilities\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.877681 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmvjc\" (UniqueName: \"kubernetes.io/projected/263d0390-f3bf-4356-9d52-388bcd4e1e6b-kube-api-access-xmvjc\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.878194 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-catalog-content\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.980216 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-utilities\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.980289 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmvjc\" (UniqueName: \"kubernetes.io/projected/263d0390-f3bf-4356-9d52-388bcd4e1e6b-kube-api-access-xmvjc\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.980468 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-catalog-content\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.980804 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-utilities\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:16 crc kubenswrapper[4611]: I0929 14:03:16.980841 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-catalog-content\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:17 crc kubenswrapper[4611]: I0929 14:03:17.013477 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmvjc\" (UniqueName: \"kubernetes.io/projected/263d0390-f3bf-4356-9d52-388bcd4e1e6b-kube-api-access-xmvjc\") pod \"redhat-marketplace-27cxh\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") " pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:17 crc kubenswrapper[4611]: I0929 14:03:17.038535 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:17 crc kubenswrapper[4611]: I0929 14:03:17.571879 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-27cxh"] Sep 29 14:03:18 crc kubenswrapper[4611]: W0929 14:03:18.036491 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod263d0390_f3bf_4356_9d52_388bcd4e1e6b.slice/crio-bdb5c4fc1618236e2ed1fbb2177ae41e5b39d3b596766b5d18b549c8c41f0a31 WatchSource:0}: Error finding container bdb5c4fc1618236e2ed1fbb2177ae41e5b39d3b596766b5d18b549c8c41f0a31: Status 404 returned error can't find the container with id bdb5c4fc1618236e2ed1fbb2177ae41e5b39d3b596766b5d18b549c8c41f0a31 Sep 29 14:03:18 crc kubenswrapper[4611]: I0929 14:03:18.823443 4611 generic.go:334] "Generic (PLEG): container finished" podID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerID="a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a" exitCode=0 Sep 29 14:03:18 crc kubenswrapper[4611]: I0929 14:03:18.823934 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerDied","Data":"a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a"} Sep 29 14:03:18 crc kubenswrapper[4611]: I0929 14:03:18.824010 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerStarted","Data":"bdb5c4fc1618236e2ed1fbb2177ae41e5b39d3b596766b5d18b549c8c41f0a31"} Sep 29 14:03:18 crc kubenswrapper[4611]: I0929 14:03:18.830269 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:03:20 crc kubenswrapper[4611]: I0929 14:03:20.850689 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerStarted","Data":"3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b"} Sep 29 14:03:21 crc kubenswrapper[4611]: I0929 14:03:21.860296 4611 generic.go:334] "Generic (PLEG): container finished" podID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerID="3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b" exitCode=0 Sep 29 
Sep 29 14:03:21 crc kubenswrapper[4611]: I0929 14:03:21.860372 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerDied","Data":"3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b"}
Sep 29 14:03:22 crc kubenswrapper[4611]: I0929 14:03:22.873758 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerStarted","Data":"2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5"}
Sep 29 14:03:22 crc kubenswrapper[4611]: I0929 14:03:22.896730 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-27cxh" podStartSLOduration=3.141917194 podStartE2EDuration="6.89671057s" podCreationTimestamp="2025-09-29 14:03:16 +0000 UTC" firstStartedPulling="2025-09-29 14:03:18.825570627 +0000 UTC m=+4985.717090233" lastFinishedPulling="2025-09-29 14:03:22.580363973 +0000 UTC m=+4989.471883609" observedRunningTime="2025-09-29 14:03:22.894313501 +0000 UTC m=+4989.785833097" watchObservedRunningTime="2025-09-29 14:03:22.89671057 +0000 UTC m=+4989.788230176"
Sep 29 14:03:27 crc kubenswrapper[4611]: I0929 14:03:27.038957 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-27cxh"
Sep 29 14:03:27 crc kubenswrapper[4611]: I0929 14:03:27.039614 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-27cxh"
Sep 29 14:03:27 crc kubenswrapper[4611]: I0929 14:03:27.116188 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-27cxh"
Sep 29 14:03:27 crc kubenswrapper[4611]: I0929 14:03:27.984201 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-27cxh"
Sep 29 14:03:28 crc kubenswrapper[4611]: I0929 14:03:28.043523 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-27cxh"]
Sep 29 14:03:29 crc kubenswrapper[4611]: I0929 14:03:29.942668 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-27cxh" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="registry-server" containerID="cri-o://2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5" gracePeriod=2
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.469193 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27cxh"
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.568992 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-catalog-content\") pod \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") "
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.569071 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-utilities\") pod \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") "
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.569254 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmvjc\" (UniqueName: \"kubernetes.io/projected/263d0390-f3bf-4356-9d52-388bcd4e1e6b-kube-api-access-xmvjc\") pod \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\" (UID: \"263d0390-f3bf-4356-9d52-388bcd4e1e6b\") "
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.570647 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-utilities" (OuterVolumeSpecName: "utilities") pod "263d0390-f3bf-4356-9d52-388bcd4e1e6b" (UID: "263d0390-f3bf-4356-9d52-388bcd4e1e6b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.586127 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/263d0390-f3bf-4356-9d52-388bcd4e1e6b-kube-api-access-xmvjc" (OuterVolumeSpecName: "kube-api-access-xmvjc") pod "263d0390-f3bf-4356-9d52-388bcd4e1e6b" (UID: "263d0390-f3bf-4356-9d52-388bcd4e1e6b"). InnerVolumeSpecName "kube-api-access-xmvjc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.602455 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "263d0390-f3bf-4356-9d52-388bcd4e1e6b" (UID: "263d0390-f3bf-4356-9d52-388bcd4e1e6b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.671152 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmvjc\" (UniqueName: \"kubernetes.io/projected/263d0390-f3bf-4356-9d52-388bcd4e1e6b-kube-api-access-xmvjc\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.671452 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.671517 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/263d0390-f3bf-4356-9d52-388bcd4e1e6b-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.955719 4611 generic.go:334] "Generic (PLEG): container finished" podID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerID="2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5" exitCode=0
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.955771 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerDied","Data":"2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5"}
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.955801 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27cxh" event={"ID":"263d0390-f3bf-4356-9d52-388bcd4e1e6b","Type":"ContainerDied","Data":"bdb5c4fc1618236e2ed1fbb2177ae41e5b39d3b596766b5d18b549c8c41f0a31"}
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.955828 4611 scope.go:117] "RemoveContainer" containerID="2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5"
Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.955967 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27cxh"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27cxh" Sep 29 14:03:30 crc kubenswrapper[4611]: I0929 14:03:30.993276 4611 scope.go:117] "RemoveContainer" containerID="3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.009257 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-27cxh"] Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.019052 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-27cxh"] Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.633724 4611 scope.go:117] "RemoveContainer" containerID="a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.749913 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" path="/var/lib/kubelet/pods/263d0390-f3bf-4356-9d52-388bcd4e1e6b/volumes" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.778949 4611 scope.go:117] "RemoveContainer" containerID="2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5" Sep 29 14:03:31 crc kubenswrapper[4611]: E0929 14:03:31.779439 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5\": container with ID starting with 2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5 not found: ID does not exist" containerID="2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.779500 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5"} err="failed to get container status \"2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5\": rpc error: code = NotFound desc = could not find container \"2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5\": container with ID starting with 2df30339ac028befed988c049f6672a406c0f9e1b18f696dbf331fcf29b067a5 not found: ID does not exist" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.779535 4611 scope.go:117] "RemoveContainer" containerID="3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b" Sep 29 14:03:31 crc kubenswrapper[4611]: E0929 14:03:31.779859 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b\": container with ID starting with 3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b not found: ID does not exist" containerID="3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.779887 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b"} err="failed to get container status \"3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b\": rpc error: code = NotFound desc = could not find container \"3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b\": container with ID starting with 3d25a15693e4f863f71541fa3ef37fb3a8984cecf4ba0a36162af5d64438f00b not found: ID does not exist" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 
14:03:31.779909 4611 scope.go:117] "RemoveContainer" containerID="a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a" Sep 29 14:03:31 crc kubenswrapper[4611]: E0929 14:03:31.780135 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a\": container with ID starting with a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a not found: ID does not exist" containerID="a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a" Sep 29 14:03:31 crc kubenswrapper[4611]: I0929 14:03:31.780157 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a"} err="failed to get container status \"a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a\": rpc error: code = NotFound desc = could not find container \"a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a\": container with ID starting with a6e1432ca911b0b830faec44c9a742f318d4943490b2271ac61044004dd18d0a not found: ID does not exist" Sep 29 14:04:34 crc kubenswrapper[4611]: I0929 14:04:34.629013 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:04:34 crc kubenswrapper[4611]: I0929 14:04:34.629607 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:05:04 crc kubenswrapper[4611]: I0929 14:05:04.629291 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:05:04 crc kubenswrapper[4611]: I0929 14:05:04.630114 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:05:34 crc kubenswrapper[4611]: I0929 14:05:34.628932 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:05:34 crc kubenswrapper[4611]: I0929 14:05:34.629687 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:05:34 crc kubenswrapper[4611]: I0929 14:05:34.629753 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:05:34 crc kubenswrapper[4611]: I0929 14:05:34.630651 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:05:34 crc kubenswrapper[4611]: I0929 14:05:34.630724 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" gracePeriod=600 Sep 29 14:05:35 crc kubenswrapper[4611]: E0929 14:05:35.780405 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:05:36 crc kubenswrapper[4611]: I0929 14:05:36.284129 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" exitCode=0 Sep 29 14:05:36 crc kubenswrapper[4611]: I0929 14:05:36.284326 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"} Sep 29 14:05:36 crc kubenswrapper[4611]: I0929 14:05:36.284558 4611 scope.go:117] "RemoveContainer" containerID="4923a2c81c48e8c95b663bf0a126e00604c30dc0282e8f7fdb4ee29ec45d4c17" Sep 29 14:05:36 crc kubenswrapper[4611]: I0929 14:05:36.285952 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" Sep 29 14:05:36 crc kubenswrapper[4611]: E0929 14:05:36.286445 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:05:47 crc kubenswrapper[4611]: I0929 14:05:47.736140 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" Sep 29 14:05:47 crc kubenswrapper[4611]: E0929 14:05:47.736904 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:05:55 crc 
Sep 29 14:05:55 crc kubenswrapper[4611]: I0929 14:05:55.932433 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-54fd444d4f-vmksq" podUID="cf23ea05-4538-4fed-bb3d-07d009f400bd" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502"
Sep 29 14:06:01 crc kubenswrapper[4611]: I0929 14:06:01.737115 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:06:01 crc kubenswrapper[4611]: E0929 14:06:01.738508 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:06:15 crc kubenswrapper[4611]: I0929 14:06:15.736272 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:06:15 crc kubenswrapper[4611]: E0929 14:06:15.737564 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:06:27 crc kubenswrapper[4611]: I0929 14:06:27.736762 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:06:27 crc kubenswrapper[4611]: E0929 14:06:27.737943 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:06:41 crc kubenswrapper[4611]: I0929 14:06:41.736859 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:06:41 crc kubenswrapper[4611]: E0929 14:06:41.737971 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:06:55 crc kubenswrapper[4611]: I0929 14:06:55.737309 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:06:55 crc kubenswrapper[4611]: E0929 14:06:55.738464 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:07:10 crc kubenswrapper[4611]: I0929 14:07:10.736593 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:07:10 crc kubenswrapper[4611]: E0929 14:07:10.737723 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:07:23 crc kubenswrapper[4611]: I0929 14:07:23.747243 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:07:23 crc kubenswrapper[4611]: E0929 14:07:23.747995 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:07:38 crc kubenswrapper[4611]: I0929 14:07:38.736658 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:07:38 crc kubenswrapper[4611]: E0929 14:07:38.737696 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:07:42 crc kubenswrapper[4611]: E0929 14:07:42.246842 4611 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.74:44412->38.102.83.74:46265: read tcp 38.102.83.74:44412->38.102.83.74:46265: read: connection reset by peer
Sep 29 14:07:52 crc kubenswrapper[4611]: I0929 14:07:52.736789 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:07:52 crc kubenswrapper[4611]: E0929 14:07:52.737722 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:08:06 crc kubenswrapper[4611]: I0929 14:08:06.736094 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:08:06 crc kubenswrapper[4611]: E0929 14:08:06.737018 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:08:17 crc kubenswrapper[4611]: I0929 14:08:17.736434 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:08:17 crc kubenswrapper[4611]: E0929 14:08:17.737234 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:08:31 crc kubenswrapper[4611]: I0929 14:08:31.737442 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:08:31 crc kubenswrapper[4611]: E0929 14:08:31.738819 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:08:45 crc kubenswrapper[4611]: I0929 14:08:45.736758 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:08:45 crc kubenswrapper[4611]: E0929 14:08:45.737662 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:09:00 crc kubenswrapper[4611]: I0929 14:09:00.737322 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:09:00 crc kubenswrapper[4611]: E0929 14:09:00.738287 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.409120 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wcjdz"]
Sep 29 14:09:06 crc kubenswrapper[4611]: E0929 14:09:06.410293 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="registry-server"
Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.410315 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="registry-server"
Sep 29 14:09:06 crc kubenswrapper[4611]: E0929 14:09:06.410337 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="extract-utilities"
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="extract-utilities" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.410352 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="extract-utilities" Sep 29 14:09:06 crc kubenswrapper[4611]: E0929 14:09:06.410386 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="extract-content" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.410400 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="extract-content" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.410854 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="263d0390-f3bf-4356-9d52-388bcd4e1e6b" containerName="registry-server" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.413603 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.435286 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wcjdz"] Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.602212 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvzxl\" (UniqueName: \"kubernetes.io/projected/23bf2867-1a46-4583-acab-20be6849d34f-kube-api-access-vvzxl\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.602603 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-catalog-content\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.602872 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-utilities\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.705145 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-utilities\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.705252 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvzxl\" (UniqueName: \"kubernetes.io/projected/23bf2867-1a46-4583-acab-20be6849d34f-kube-api-access-vvzxl\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.705306 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-catalog-content\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.705896 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-catalog-content\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.706389 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-utilities\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.743923 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvzxl\" (UniqueName: \"kubernetes.io/projected/23bf2867-1a46-4583-acab-20be6849d34f-kube-api-access-vvzxl\") pod \"community-operators-wcjdz\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") " pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:06 crc kubenswrapper[4611]: I0929 14:09:06.756002 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:07 crc kubenswrapper[4611]: I0929 14:09:07.114830 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wcjdz"] Sep 29 14:09:07 crc kubenswrapper[4611]: I0929 14:09:07.467044 4611 generic.go:334] "Generic (PLEG): container finished" podID="23bf2867-1a46-4583-acab-20be6849d34f" containerID="fcad41c2f594642037b53689ed355fdd6e13fc8f96419f5a9d8f6c6f52bdcc19" exitCode=0 Sep 29 14:09:07 crc kubenswrapper[4611]: I0929 14:09:07.467125 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerDied","Data":"fcad41c2f594642037b53689ed355fdd6e13fc8f96419f5a9d8f6c6f52bdcc19"} Sep 29 14:09:07 crc kubenswrapper[4611]: I0929 14:09:07.467329 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerStarted","Data":"fd3778d5f2711a7d6f0fa264ef629ff722e92f719df67d37719d4fc794011117"} Sep 29 14:09:07 crc kubenswrapper[4611]: I0929 14:09:07.470213 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:09:09 crc kubenswrapper[4611]: I0929 14:09:09.494775 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerStarted","Data":"df233fb6e5a7b7784006ea0500c0313a589becce9302e943509ceac0f912b801"} Sep 29 14:09:10 crc kubenswrapper[4611]: I0929 14:09:10.515219 4611 generic.go:334] "Generic (PLEG): container finished" podID="23bf2867-1a46-4583-acab-20be6849d34f" containerID="df233fb6e5a7b7784006ea0500c0313a589becce9302e943509ceac0f912b801" exitCode=0 Sep 29 14:09:10 crc kubenswrapper[4611]: I0929 14:09:10.515374 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerDied","Data":"df233fb6e5a7b7784006ea0500c0313a589becce9302e943509ceac0f912b801"} Sep 29 14:09:11 crc kubenswrapper[4611]: I0929 14:09:11.526941 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerStarted","Data":"a151d332e44f2ba7b60bcab6bde94b5ee9c5abd4ba808ca2e184ac297bbb20da"} Sep 29 14:09:11 crc kubenswrapper[4611]: I0929 14:09:11.560268 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wcjdz" podStartSLOduration=2.120247625 podStartE2EDuration="5.560246827s" podCreationTimestamp="2025-09-29 14:09:06 +0000 UTC" firstStartedPulling="2025-09-29 14:09:07.469883617 +0000 UTC m=+5334.361403233" lastFinishedPulling="2025-09-29 14:09:10.909882819 +0000 UTC m=+5337.801402435" observedRunningTime="2025-09-29 14:09:11.552153223 +0000 UTC m=+5338.443672839" watchObservedRunningTime="2025-09-29 14:09:11.560246827 +0000 UTC m=+5338.451766433" Sep 29 14:09:11 crc kubenswrapper[4611]: I0929 14:09:11.736169 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" Sep 29 14:09:11 crc kubenswrapper[4611]: E0929 14:09:11.736472 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:09:16 crc kubenswrapper[4611]: I0929 14:09:16.756683 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:16 crc kubenswrapper[4611]: I0929 14:09:16.757398 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:17 crc kubenswrapper[4611]: I0929 14:09:17.840703 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wcjdz" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="registry-server" probeResult="failure" output=< Sep 29 14:09:17 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:09:17 crc kubenswrapper[4611]: > Sep 29 14:09:23 crc kubenswrapper[4611]: I0929 14:09:23.743301 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" Sep 29 14:09:23 crc kubenswrapper[4611]: E0929 14:09:23.744087 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:09:26 crc kubenswrapper[4611]: I0929 14:09:26.813343 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wcjdz" Sep 29 14:09:26 crc kubenswrapper[4611]: I0929 
Sep 29 14:09:26 crc kubenswrapper[4611]: I0929 14:09:26.902551 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wcjdz"
Sep 29 14:09:27 crc kubenswrapper[4611]: I0929 14:09:27.087293 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wcjdz"]
Sep 29 14:09:28 crc kubenswrapper[4611]: I0929 14:09:28.703608 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wcjdz" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="registry-server" containerID="cri-o://a151d332e44f2ba7b60bcab6bde94b5ee9c5abd4ba808ca2e184ac297bbb20da" gracePeriod=2
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.716042 4611 generic.go:334] "Generic (PLEG): container finished" podID="23bf2867-1a46-4583-acab-20be6849d34f" containerID="a151d332e44f2ba7b60bcab6bde94b5ee9c5abd4ba808ca2e184ac297bbb20da" exitCode=0
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.716119 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerDied","Data":"a151d332e44f2ba7b60bcab6bde94b5ee9c5abd4ba808ca2e184ac297bbb20da"}
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.854883 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wcjdz"
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.898323 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-utilities\") pod \"23bf2867-1a46-4583-acab-20be6849d34f\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") "
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.898427 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-catalog-content\") pod \"23bf2867-1a46-4583-acab-20be6849d34f\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") "
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.898479 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvzxl\" (UniqueName: \"kubernetes.io/projected/23bf2867-1a46-4583-acab-20be6849d34f-kube-api-access-vvzxl\") pod \"23bf2867-1a46-4583-acab-20be6849d34f\" (UID: \"23bf2867-1a46-4583-acab-20be6849d34f\") "
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.899474 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-utilities" (OuterVolumeSpecName: "utilities") pod "23bf2867-1a46-4583-acab-20be6849d34f" (UID: "23bf2867-1a46-4583-acab-20be6849d34f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.905956 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23bf2867-1a46-4583-acab-20be6849d34f-kube-api-access-vvzxl" (OuterVolumeSpecName: "kube-api-access-vvzxl") pod "23bf2867-1a46-4583-acab-20be6849d34f" (UID: "23bf2867-1a46-4583-acab-20be6849d34f"). InnerVolumeSpecName "kube-api-access-vvzxl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:09:29 crc kubenswrapper[4611]: I0929 14:09:29.945186 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23bf2867-1a46-4583-acab-20be6849d34f" (UID: "23bf2867-1a46-4583-acab-20be6849d34f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.000899 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.000937 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bf2867-1a46-4583-acab-20be6849d34f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.000946 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvzxl\" (UniqueName: \"kubernetes.io/projected/23bf2867-1a46-4583-acab-20be6849d34f-kube-api-access-vvzxl\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.731799 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcjdz" event={"ID":"23bf2867-1a46-4583-acab-20be6849d34f","Type":"ContainerDied","Data":"fd3778d5f2711a7d6f0fa264ef629ff722e92f719df67d37719d4fc794011117"}
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.731856 4611 scope.go:117] "RemoveContainer" containerID="a151d332e44f2ba7b60bcab6bde94b5ee9c5abd4ba808ca2e184ac297bbb20da"
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.731879 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wcjdz"
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.760609 4611 scope.go:117] "RemoveContainer" containerID="df233fb6e5a7b7784006ea0500c0313a589becce9302e943509ceac0f912b801"
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.804676 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wcjdz"]
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.809584 4611 scope.go:117] "RemoveContainer" containerID="fcad41c2f594642037b53689ed355fdd6e13fc8f96419f5a9d8f6c6f52bdcc19"
Sep 29 14:09:30 crc kubenswrapper[4611]: I0929 14:09:30.816903 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wcjdz"]
Sep 29 14:09:31 crc kubenswrapper[4611]: I0929 14:09:31.750645 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23bf2867-1a46-4583-acab-20be6849d34f" path="/var/lib/kubelet/pods/23bf2867-1a46-4583-acab-20be6849d34f/volumes"
Sep 29 14:09:38 crc kubenswrapper[4611]: I0929 14:09:38.737744 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:09:38 crc kubenswrapper[4611]: E0929 14:09:38.738426 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:09:51 crc kubenswrapper[4611]: I0929 14:09:51.736300 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:09:51 crc kubenswrapper[4611]: E0929 14:09:51.737353 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:10:05 crc kubenswrapper[4611]: I0929 14:10:05.737092 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:10:05 crc kubenswrapper[4611]: E0929 14:10:05.738156 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:10:16 crc kubenswrapper[4611]: I0929 14:10:16.737161 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:10:16 crc kubenswrapper[4611]: E0929 14:10:16.738565 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:10:30 crc kubenswrapper[4611]: I0929 14:10:30.736924 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:10:30 crc kubenswrapper[4611]: E0929 14:10:30.737809 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c"
Sep 29 14:10:36 crc kubenswrapper[4611]: E0929 14:10:36.547131 4611 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.74:33280->38.102.83.74:46265: write tcp 38.102.83.74:33280->38.102.83.74:46265: write: connection reset by peer
Sep 29 14:10:42 crc kubenswrapper[4611]: I0929 14:10:42.737328 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6"
Sep 29 14:10:43 crc kubenswrapper[4611]: I0929 14:10:43.524109 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"98b742a9eb7639247c36ce459a7072dcf8e7ea94fc6539d72b442103a328cbb1"}
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.869399 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jxxvb"]
Sep 29 14:12:13 crc kubenswrapper[4611]: E0929 14:12:13.870365 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="extract-utilities"
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.870378 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="extract-utilities"
Sep 29 14:12:13 crc kubenswrapper[4611]: E0929 14:12:13.870402 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="registry-server"
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.870408 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="registry-server"
Sep 29 14:12:13 crc kubenswrapper[4611]: E0929 14:12:13.870429 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="extract-content"
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.870437 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="extract-content"
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.870617 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="23bf2867-1a46-4583-acab-20be6849d34f" containerName="registry-server"
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.873364 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:13 crc kubenswrapper[4611]: I0929 14:12:13.889854 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jxxvb"]
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.000359 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwllq\" (UniqueName: \"kubernetes.io/projected/08c090be-fa73-4676-b8af-a5edb230b09a-kube-api-access-fwllq\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.000586 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-utilities\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.000704 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-catalog-content\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.102577 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwllq\" (UniqueName: \"kubernetes.io/projected/08c090be-fa73-4676-b8af-a5edb230b09a-kube-api-access-fwllq\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.103051 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-utilities\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.103156 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-catalog-content\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.103505 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-utilities\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.103567 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-catalog-content\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.130531 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwllq\" (UniqueName: \"kubernetes.io/projected/08c090be-fa73-4676-b8af-a5edb230b09a-kube-api-access-fwllq\") pod \"certified-operators-jxxvb\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.201559 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jxxvb"
Sep 29 14:12:14 crc kubenswrapper[4611]: I0929 14:12:14.875947 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jxxvb"]
Sep 29 14:12:15 crc kubenswrapper[4611]: I0929 14:12:15.426852 4611 generic.go:334] "Generic (PLEG): container finished" podID="08c090be-fa73-4676-b8af-a5edb230b09a" containerID="82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe" exitCode=0
Sep 29 14:12:15 crc kubenswrapper[4611]: I0929 14:12:15.426902 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerDied","Data":"82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe"}
Sep 29 14:12:15 crc kubenswrapper[4611]: I0929 14:12:15.426935 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerStarted","Data":"d4f3547b15b01041566c8a07199b680f30ff2e83f3014ef5340dff0e2ad656de"}
Sep 29 14:12:18 crc kubenswrapper[4611]: I0929 14:12:18.463978 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerStarted","Data":"451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053"}
Sep 29 14:12:21 crc kubenswrapper[4611]: I0929 14:12:21.532197 4611 generic.go:334] "Generic (PLEG): container finished" podID="08c090be-fa73-4676-b8af-a5edb230b09a" containerID="451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053" exitCode=0
Sep 29 14:12:21 crc kubenswrapper[4611]: I0929 14:12:21.532275 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerDied","Data":"451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053"}
Sep 29 14:12:23 crc kubenswrapper[4611]: I0929 14:12:23.552938 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerStarted","Data":"c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250"}
Sep 29 14:12:23 crc kubenswrapper[4611]: I0929 14:12:23.578874 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jxxvb" podStartSLOduration=2.931517124 podStartE2EDuration="10.57885393s" podCreationTimestamp="2025-09-29 14:12:13 +0000 UTC" firstStartedPulling="2025-09-29 14:12:15.428459017 +0000 UTC m=+5522.319978613" lastFinishedPulling="2025-09-29 14:12:23.075795813 +0000 UTC m=+5529.967315419" observedRunningTime="2025-09-29 14:12:23.575794401 +0000 UTC m=+5530.467314017" watchObservedRunningTime="2025-09-29 14:12:23.57885393 +0000 UTC m=+5530.470373536"
Sep 29 14:12:24 crc kubenswrapper[4611]: I0929 14:12:24.202235 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup"
status="unhealthy" pod="openshift-marketplace/certified-operators-jxxvb" Sep 29 14:12:24 crc kubenswrapper[4611]: I0929 14:12:24.202292 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jxxvb" Sep 29 14:12:25 crc kubenswrapper[4611]: I0929 14:12:25.252699 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jxxvb" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="registry-server" probeResult="failure" output=< Sep 29 14:12:25 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:12:25 crc kubenswrapper[4611]: > Sep 29 14:12:26 crc kubenswrapper[4611]: I0929 14:12:26.953816 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hcb5f"] Sep 29 14:12:26 crc kubenswrapper[4611]: I0929 14:12:26.956939 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:26 crc kubenswrapper[4611]: I0929 14:12:26.964119 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hcb5f"] Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.116481 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-utilities\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.116535 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6s5l\" (UniqueName: \"kubernetes.io/projected/08c1a016-0213-4199-b91a-49d3e1425afc-kube-api-access-s6s5l\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.116732 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-catalog-content\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.219076 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-utilities\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.219153 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6s5l\" (UniqueName: \"kubernetes.io/projected/08c1a016-0213-4199-b91a-49d3e1425afc-kube-api-access-s6s5l\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.219231 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-catalog-content\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") 
" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.220001 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-catalog-content\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.220058 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-utilities\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.251547 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6s5l\" (UniqueName: \"kubernetes.io/projected/08c1a016-0213-4199-b91a-49d3e1425afc-kube-api-access-s6s5l\") pod \"redhat-operators-hcb5f\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.274706 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:27 crc kubenswrapper[4611]: I0929 14:12:27.947290 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hcb5f"] Sep 29 14:12:28 crc kubenswrapper[4611]: I0929 14:12:28.622207 4611 generic.go:334] "Generic (PLEG): container finished" podID="08c1a016-0213-4199-b91a-49d3e1425afc" containerID="cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c" exitCode=0 Sep 29 14:12:28 crc kubenswrapper[4611]: I0929 14:12:28.622276 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerDied","Data":"cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c"} Sep 29 14:12:28 crc kubenswrapper[4611]: I0929 14:12:28.622655 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerStarted","Data":"4c0505cbf167dc1c848038eedcf9b45673afd2a0f89aa384ac05b8d338e85f3b"} Sep 29 14:12:29 crc kubenswrapper[4611]: I0929 14:12:29.632747 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerStarted","Data":"31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7"} Sep 29 14:12:33 crc kubenswrapper[4611]: I0929 14:12:33.669162 4611 generic.go:334] "Generic (PLEG): container finished" podID="08c1a016-0213-4199-b91a-49d3e1425afc" containerID="31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7" exitCode=0 Sep 29 14:12:33 crc kubenswrapper[4611]: I0929 14:12:33.669240 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerDied","Data":"31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7"} Sep 29 14:12:34 crc kubenswrapper[4611]: I0929 14:12:34.254435 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jxxvb" Sep 29 
14:12:34 crc kubenswrapper[4611]: I0929 14:12:34.305945 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jxxvb" Sep 29 14:12:34 crc kubenswrapper[4611]: I0929 14:12:34.913746 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jxxvb"] Sep 29 14:12:35 crc kubenswrapper[4611]: I0929 14:12:35.697418 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerStarted","Data":"70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02"} Sep 29 14:12:35 crc kubenswrapper[4611]: I0929 14:12:35.697567 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jxxvb" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="registry-server" containerID="cri-o://c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250" gracePeriod=2 Sep 29 14:12:35 crc kubenswrapper[4611]: I0929 14:12:35.720439 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hcb5f" podStartSLOduration=3.20758569 podStartE2EDuration="9.720419536s" podCreationTimestamp="2025-09-29 14:12:26 +0000 UTC" firstStartedPulling="2025-09-29 14:12:28.625474795 +0000 UTC m=+5535.516994401" lastFinishedPulling="2025-09-29 14:12:35.138308641 +0000 UTC m=+5542.029828247" observedRunningTime="2025-09-29 14:12:35.717743358 +0000 UTC m=+5542.609262974" watchObservedRunningTime="2025-09-29 14:12:35.720419536 +0000 UTC m=+5542.611939142" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.394745 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jxxvb" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.477609 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwllq\" (UniqueName: \"kubernetes.io/projected/08c090be-fa73-4676-b8af-a5edb230b09a-kube-api-access-fwllq\") pod \"08c090be-fa73-4676-b8af-a5edb230b09a\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.478068 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-catalog-content\") pod \"08c090be-fa73-4676-b8af-a5edb230b09a\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.478243 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-utilities\") pod \"08c090be-fa73-4676-b8af-a5edb230b09a\" (UID: \"08c090be-fa73-4676-b8af-a5edb230b09a\") " Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.478941 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-utilities" (OuterVolumeSpecName: "utilities") pod "08c090be-fa73-4676-b8af-a5edb230b09a" (UID: "08c090be-fa73-4676-b8af-a5edb230b09a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.487016 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08c090be-fa73-4676-b8af-a5edb230b09a-kube-api-access-fwllq" (OuterVolumeSpecName: "kube-api-access-fwllq") pod "08c090be-fa73-4676-b8af-a5edb230b09a" (UID: "08c090be-fa73-4676-b8af-a5edb230b09a"). InnerVolumeSpecName "kube-api-access-fwllq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.542365 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08c090be-fa73-4676-b8af-a5edb230b09a" (UID: "08c090be-fa73-4676-b8af-a5edb230b09a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.580400 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.580434 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c090be-fa73-4676-b8af-a5edb230b09a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.580444 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwllq\" (UniqueName: \"kubernetes.io/projected/08c090be-fa73-4676-b8af-a5edb230b09a-kube-api-access-fwllq\") on node \"crc\" DevicePath \"\"" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.708354 4611 generic.go:334] "Generic (PLEG): container finished" podID="08c090be-fa73-4676-b8af-a5edb230b09a" containerID="c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250" exitCode=0 Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.708393 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerDied","Data":"c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250"} Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.708420 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jxxvb" event={"ID":"08c090be-fa73-4676-b8af-a5edb230b09a","Type":"ContainerDied","Data":"d4f3547b15b01041566c8a07199b680f30ff2e83f3014ef5340dff0e2ad656de"} Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.708434 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jxxvb" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.708476 4611 scope.go:117] "RemoveContainer" containerID="c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.733252 4611 scope.go:117] "RemoveContainer" containerID="451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.751357 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jxxvb"] Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.763191 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jxxvb"] Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.797418 4611 scope.go:117] "RemoveContainer" containerID="82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.835668 4611 scope.go:117] "RemoveContainer" containerID="c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250" Sep 29 14:12:36 crc kubenswrapper[4611]: E0929 14:12:36.836103 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250\": container with ID starting with c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250 not found: ID does not exist" containerID="c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.836134 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250"} err="failed to get container status \"c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250\": rpc error: code = NotFound desc = could not find container \"c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250\": container with ID starting with c64bdbbdc7c2bfa89c0c3d717d171bd541cf0f1e4d0eb9a2d024919c9f35e250 not found: ID does not exist" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.836154 4611 scope.go:117] "RemoveContainer" containerID="451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053" Sep 29 14:12:36 crc kubenswrapper[4611]: E0929 14:12:36.836711 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053\": container with ID starting with 451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053 not found: ID does not exist" containerID="451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.836750 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053"} err="failed to get container status \"451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053\": rpc error: code = NotFound desc = could not find container \"451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053\": container with ID starting with 451d666a7f3ae94a0dd86d354f8edf659d6cc751235bbb76aa4a337dc19b2053 not found: ID does not exist" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.836763 4611 scope.go:117] "RemoveContainer" 
containerID="82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe" Sep 29 14:12:36 crc kubenswrapper[4611]: E0929 14:12:36.837120 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe\": container with ID starting with 82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe not found: ID does not exist" containerID="82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe" Sep 29 14:12:36 crc kubenswrapper[4611]: I0929 14:12:36.837157 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe"} err="failed to get container status \"82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe\": rpc error: code = NotFound desc = could not find container \"82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe\": container with ID starting with 82221f7a4281bbf4e95b750b712abad7bfb10565fea662b9a6717c4064ade9fe not found: ID does not exist" Sep 29 14:12:37 crc kubenswrapper[4611]: I0929 14:12:37.275642 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:37 crc kubenswrapper[4611]: I0929 14:12:37.275693 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:12:37 crc kubenswrapper[4611]: I0929 14:12:37.746798 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" path="/var/lib/kubelet/pods/08c090be-fa73-4676-b8af-a5edb230b09a/volumes" Sep 29 14:12:38 crc kubenswrapper[4611]: I0929 14:12:38.323632 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hcb5f" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" probeResult="failure" output=< Sep 29 14:12:38 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:12:38 crc kubenswrapper[4611]: > Sep 29 14:12:48 crc kubenswrapper[4611]: I0929 14:12:48.340512 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hcb5f" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" probeResult="failure" output=< Sep 29 14:12:48 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:12:48 crc kubenswrapper[4611]: > Sep 29 14:12:58 crc kubenswrapper[4611]: I0929 14:12:58.323248 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hcb5f" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" probeResult="failure" output=< Sep 29 14:12:58 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:12:58 crc kubenswrapper[4611]: > Sep 29 14:13:04 crc kubenswrapper[4611]: I0929 14:13:04.628975 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:13:04 crc kubenswrapper[4611]: I0929 14:13:04.629437 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" 
podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:13:07 crc kubenswrapper[4611]: I0929 14:13:07.343120 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:13:07 crc kubenswrapper[4611]: I0929 14:13:07.420795 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:13:07 crc kubenswrapper[4611]: I0929 14:13:07.591909 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hcb5f"] Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.038851 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hcb5f" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" containerID="cri-o://70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02" gracePeriod=2 Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.609970 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.637400 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6s5l\" (UniqueName: \"kubernetes.io/projected/08c1a016-0213-4199-b91a-49d3e1425afc-kube-api-access-s6s5l\") pod \"08c1a016-0213-4199-b91a-49d3e1425afc\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.637442 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-utilities\") pod \"08c1a016-0213-4199-b91a-49d3e1425afc\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.637526 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-catalog-content\") pod \"08c1a016-0213-4199-b91a-49d3e1425afc\" (UID: \"08c1a016-0213-4199-b91a-49d3e1425afc\") " Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.644904 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-utilities" (OuterVolumeSpecName: "utilities") pod "08c1a016-0213-4199-b91a-49d3e1425afc" (UID: "08c1a016-0213-4199-b91a-49d3e1425afc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.654016 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08c1a016-0213-4199-b91a-49d3e1425afc-kube-api-access-s6s5l" (OuterVolumeSpecName: "kube-api-access-s6s5l") pod "08c1a016-0213-4199-b91a-49d3e1425afc" (UID: "08c1a016-0213-4199-b91a-49d3e1425afc"). InnerVolumeSpecName "kube-api-access-s6s5l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.740830 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6s5l\" (UniqueName: \"kubernetes.io/projected/08c1a016-0213-4199-b91a-49d3e1425afc-kube-api-access-s6s5l\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.740885 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.751600 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08c1a016-0213-4199-b91a-49d3e1425afc" (UID: "08c1a016-0213-4199-b91a-49d3e1425afc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:13:09 crc kubenswrapper[4611]: I0929 14:13:09.843274 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08c1a016-0213-4199-b91a-49d3e1425afc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.051906 4611 generic.go:334] "Generic (PLEG): container finished" podID="08c1a016-0213-4199-b91a-49d3e1425afc" containerID="70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02" exitCode=0 Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.051968 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerDied","Data":"70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02"} Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.052345 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcb5f" event={"ID":"08c1a016-0213-4199-b91a-49d3e1425afc","Type":"ContainerDied","Data":"4c0505cbf167dc1c848038eedcf9b45673afd2a0f89aa384ac05b8d338e85f3b"} Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.052021 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hcb5f" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.052371 4611 scope.go:117] "RemoveContainer" containerID="70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.095892 4611 scope.go:117] "RemoveContainer" containerID="31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.096653 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hcb5f"] Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.116418 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hcb5f"] Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.123969 4611 scope.go:117] "RemoveContainer" containerID="cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.194795 4611 scope.go:117] "RemoveContainer" containerID="70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02" Sep 29 14:13:10 crc kubenswrapper[4611]: E0929 14:13:10.195312 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02\": container with ID starting with 70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02 not found: ID does not exist" containerID="70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.195363 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02"} err="failed to get container status \"70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02\": rpc error: code = NotFound desc = could not find container \"70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02\": container with ID starting with 70a25cd4d2f9718c52178b1433f71ec3666bac90e56c2c556d923c452a976b02 not found: ID does not exist" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.195390 4611 scope.go:117] "RemoveContainer" containerID="31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7" Sep 29 14:13:10 crc kubenswrapper[4611]: E0929 14:13:10.195915 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7\": container with ID starting with 31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7 not found: ID does not exist" containerID="31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.195969 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7"} err="failed to get container status \"31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7\": rpc error: code = NotFound desc = could not find container \"31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7\": container with ID starting with 31d4fb08eb82ba803e21ccb01d8774ab20cd80cb1fa91d27ff8c7c7b5ef0cab7 not found: ID does not exist" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.196006 4611 scope.go:117] "RemoveContainer" 
containerID="cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c" Sep 29 14:13:10 crc kubenswrapper[4611]: E0929 14:13:10.196350 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c\": container with ID starting with cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c not found: ID does not exist" containerID="cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c" Sep 29 14:13:10 crc kubenswrapper[4611]: I0929 14:13:10.196395 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c"} err="failed to get container status \"cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c\": rpc error: code = NotFound desc = could not find container \"cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c\": container with ID starting with cb207f89c72a52b8eb73388032076d0db05f057df41c9b2966fe9301ca1bce4c not found: ID does not exist" Sep 29 14:13:11 crc kubenswrapper[4611]: I0929 14:13:11.753086 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" path="/var/lib/kubelet/pods/08c1a016-0213-4199-b91a-49d3e1425afc/volumes" Sep 29 14:13:34 crc kubenswrapper[4611]: I0929 14:13:34.628991 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:13:34 crc kubenswrapper[4611]: I0929 14:13:34.630785 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.803038 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gpgwl"] Sep 29 14:13:38 crc kubenswrapper[4611]: E0929 14:13:38.804253 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="extract-utilities" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804272 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="extract-utilities" Sep 29 14:13:38 crc kubenswrapper[4611]: E0929 14:13:38.804292 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="extract-content" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804302 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="extract-content" Sep 29 14:13:38 crc kubenswrapper[4611]: E0929 14:13:38.804321 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="extract-content" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804331 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="extract-content" Sep 29 14:13:38 crc kubenswrapper[4611]: E0929 
14:13:38.804346 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804356 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" Sep 29 14:13:38 crc kubenswrapper[4611]: E0929 14:13:38.804384 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="extract-utilities" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804393 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="extract-utilities" Sep 29 14:13:38 crc kubenswrapper[4611]: E0929 14:13:38.804410 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="registry-server" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804418 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="registry-server" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804702 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c090be-fa73-4676-b8af-a5edb230b09a" containerName="registry-server" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.804719 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c1a016-0213-4199-b91a-49d3e1425afc" containerName="registry-server" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.806827 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.817894 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpgwl"] Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.991717 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t6jv\" (UniqueName: \"kubernetes.io/projected/49369e4a-c29f-4a6a-a960-f4adb2e4c922-kube-api-access-6t6jv\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.991801 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-catalog-content\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:38 crc kubenswrapper[4611]: I0929 14:13:38.992565 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-utilities\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.094278 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-utilities\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 
14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.094820 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-utilities\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.095104 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t6jv\" (UniqueName: \"kubernetes.io/projected/49369e4a-c29f-4a6a-a960-f4adb2e4c922-kube-api-access-6t6jv\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.095266 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-catalog-content\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.095571 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-catalog-content\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.118283 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t6jv\" (UniqueName: \"kubernetes.io/projected/49369e4a-c29f-4a6a-a960-f4adb2e4c922-kube-api-access-6t6jv\") pod \"redhat-marketplace-gpgwl\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.132350 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:39 crc kubenswrapper[4611]: I0929 14:13:39.631405 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpgwl"] Sep 29 14:13:40 crc kubenswrapper[4611]: I0929 14:13:40.405353 4611 generic.go:334] "Generic (PLEG): container finished" podID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerID="d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab" exitCode=0 Sep 29 14:13:40 crc kubenswrapper[4611]: I0929 14:13:40.405441 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpgwl" event={"ID":"49369e4a-c29f-4a6a-a960-f4adb2e4c922","Type":"ContainerDied","Data":"d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab"} Sep 29 14:13:40 crc kubenswrapper[4611]: I0929 14:13:40.406214 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpgwl" event={"ID":"49369e4a-c29f-4a6a-a960-f4adb2e4c922","Type":"ContainerStarted","Data":"eac4b14a6d20c5e978095aff958c7eb3b5962e2541c15e054d947c8927d034c7"} Sep 29 14:13:42 crc kubenswrapper[4611]: I0929 14:13:42.429970 4611 generic.go:334] "Generic (PLEG): container finished" podID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerID="28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c" exitCode=0 Sep 29 14:13:42 crc kubenswrapper[4611]: I0929 14:13:42.430105 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpgwl" event={"ID":"49369e4a-c29f-4a6a-a960-f4adb2e4c922","Type":"ContainerDied","Data":"28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c"} Sep 29 14:13:43 crc kubenswrapper[4611]: I0929 14:13:43.445480 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpgwl" event={"ID":"49369e4a-c29f-4a6a-a960-f4adb2e4c922","Type":"ContainerStarted","Data":"64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b"} Sep 29 14:13:43 crc kubenswrapper[4611]: I0929 14:13:43.479855 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gpgwl" podStartSLOduration=3.018681784 podStartE2EDuration="5.479829409s" podCreationTimestamp="2025-09-29 14:13:38 +0000 UTC" firstStartedPulling="2025-09-29 14:13:40.408514758 +0000 UTC m=+5607.300034364" lastFinishedPulling="2025-09-29 14:13:42.869662383 +0000 UTC m=+5609.761181989" observedRunningTime="2025-09-29 14:13:43.468579653 +0000 UTC m=+5610.360099279" watchObservedRunningTime="2025-09-29 14:13:43.479829409 +0000 UTC m=+5610.371349035" Sep 29 14:13:49 crc kubenswrapper[4611]: I0929 14:13:49.133173 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:49 crc kubenswrapper[4611]: I0929 14:13:49.133738 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:49 crc kubenswrapper[4611]: I0929 14:13:49.188729 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:49 crc kubenswrapper[4611]: I0929 14:13:49.561073 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:49 crc kubenswrapper[4611]: I0929 14:13:49.613700 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-gpgwl"] Sep 29 14:13:51 crc kubenswrapper[4611]: I0929 14:13:51.531856 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gpgwl" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="registry-server" containerID="cri-o://64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b" gracePeriod=2 Sep 29 14:13:51 crc kubenswrapper[4611]: I0929 14:13:51.978826 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.070985 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-utilities\") pod \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.071134 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6t6jv\" (UniqueName: \"kubernetes.io/projected/49369e4a-c29f-4a6a-a960-f4adb2e4c922-kube-api-access-6t6jv\") pod \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.071209 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-catalog-content\") pod \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\" (UID: \"49369e4a-c29f-4a6a-a960-f4adb2e4c922\") " Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.071876 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-utilities" (OuterVolumeSpecName: "utilities") pod "49369e4a-c29f-4a6a-a960-f4adb2e4c922" (UID: "49369e4a-c29f-4a6a-a960-f4adb2e4c922"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.072364 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.076311 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49369e4a-c29f-4a6a-a960-f4adb2e4c922-kube-api-access-6t6jv" (OuterVolumeSpecName: "kube-api-access-6t6jv") pod "49369e4a-c29f-4a6a-a960-f4adb2e4c922" (UID: "49369e4a-c29f-4a6a-a960-f4adb2e4c922"). InnerVolumeSpecName "kube-api-access-6t6jv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.088900 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49369e4a-c29f-4a6a-a960-f4adb2e4c922" (UID: "49369e4a-c29f-4a6a-a960-f4adb2e4c922"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.173925 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6t6jv\" (UniqueName: \"kubernetes.io/projected/49369e4a-c29f-4a6a-a960-f4adb2e4c922-kube-api-access-6t6jv\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.173954 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49369e4a-c29f-4a6a-a960-f4adb2e4c922-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.550105 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpgwl" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.550138 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpgwl" event={"ID":"49369e4a-c29f-4a6a-a960-f4adb2e4c922","Type":"ContainerDied","Data":"64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b"} Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.550234 4611 scope.go:117] "RemoveContainer" containerID="64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.550051 4611 generic.go:334] "Generic (PLEG): container finished" podID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerID="64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b" exitCode=0 Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.550487 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpgwl" event={"ID":"49369e4a-c29f-4a6a-a960-f4adb2e4c922","Type":"ContainerDied","Data":"eac4b14a6d20c5e978095aff958c7eb3b5962e2541c15e054d947c8927d034c7"} Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.581316 4611 scope.go:117] "RemoveContainer" containerID="28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.603321 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpgwl"] Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.608318 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpgwl"] Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.616986 4611 scope.go:117] "RemoveContainer" containerID="d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.666317 4611 scope.go:117] "RemoveContainer" containerID="64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b" Sep 29 14:13:52 crc kubenswrapper[4611]: E0929 14:13:52.666990 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b\": container with ID starting with 64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b not found: ID does not exist" containerID="64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.667063 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b"} err="failed to get container status 
\"64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b\": rpc error: code = NotFound desc = could not find container \"64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b\": container with ID starting with 64c5393f715d80d5d5296f666ea4882da892e498959374032f5cbe046e87304b not found: ID does not exist" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.667104 4611 scope.go:117] "RemoveContainer" containerID="28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c" Sep 29 14:13:52 crc kubenswrapper[4611]: E0929 14:13:52.667699 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c\": container with ID starting with 28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c not found: ID does not exist" containerID="28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.667742 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c"} err="failed to get container status \"28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c\": rpc error: code = NotFound desc = could not find container \"28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c\": container with ID starting with 28ca322caffe5af5f1fed7eae8f731e1e17a043e0561b67de4852efec94c8b2c not found: ID does not exist" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.667773 4611 scope.go:117] "RemoveContainer" containerID="d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab" Sep 29 14:13:52 crc kubenswrapper[4611]: E0929 14:13:52.668018 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab\": container with ID starting with d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab not found: ID does not exist" containerID="d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab" Sep 29 14:13:52 crc kubenswrapper[4611]: I0929 14:13:52.668051 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab"} err="failed to get container status \"d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab\": rpc error: code = NotFound desc = could not find container \"d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab\": container with ID starting with d0a73a9c525f4fffe84997f476e1934e87c246ba5822840bc5fb9b12881c8cab not found: ID does not exist" Sep 29 14:13:53 crc kubenswrapper[4611]: I0929 14:13:53.762175 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" path="/var/lib/kubelet/pods/49369e4a-c29f-4a6a-a960-f4adb2e4c922/volumes" Sep 29 14:14:04 crc kubenswrapper[4611]: I0929 14:14:04.628556 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:14:04 crc kubenswrapper[4611]: I0929 14:14:04.629602 4611 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:14:04 crc kubenswrapper[4611]: I0929 14:14:04.629745 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:14:04 crc kubenswrapper[4611]: I0929 14:14:04.631321 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"98b742a9eb7639247c36ce459a7072dcf8e7ea94fc6539d72b442103a328cbb1"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:14:04 crc kubenswrapper[4611]: I0929 14:14:04.631475 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://98b742a9eb7639247c36ce459a7072dcf8e7ea94fc6539d72b442103a328cbb1" gracePeriod=600 Sep 29 14:14:05 crc kubenswrapper[4611]: I0929 14:14:05.680197 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="98b742a9eb7639247c36ce459a7072dcf8e7ea94fc6539d72b442103a328cbb1" exitCode=0 Sep 29 14:14:05 crc kubenswrapper[4611]: I0929 14:14:05.680350 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"98b742a9eb7639247c36ce459a7072dcf8e7ea94fc6539d72b442103a328cbb1"} Sep 29 14:14:05 crc kubenswrapper[4611]: I0929 14:14:05.680586 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb"} Sep 29 14:14:05 crc kubenswrapper[4611]: I0929 14:14:05.680605 4611 scope.go:117] "RemoveContainer" containerID="b79580933ed017a71c232abdd53a9b3033e65d063aa126b5a24990b89c98ded6" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.175379 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5"] Sep 29 14:15:00 crc kubenswrapper[4611]: E0929 14:15:00.176397 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="extract-utilities" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.176412 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="extract-utilities" Sep 29 14:15:00 crc kubenswrapper[4611]: E0929 14:15:00.176444 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="extract-content" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.176454 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="extract-content" Sep 29 14:15:00 crc kubenswrapper[4611]: E0929 14:15:00.176490 4611 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="registry-server" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.176498 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="registry-server" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.176735 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="49369e4a-c29f-4a6a-a960-f4adb2e4c922" containerName="registry-server" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.177569 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.191836 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5"] Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.209372 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.217779 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.308604 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-config-volume\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.309060 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-secret-volume\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.309088 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgpz2\" (UniqueName: \"kubernetes.io/projected/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-kube-api-access-sgpz2\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.411028 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-config-volume\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.411824 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-secret-volume\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.411780 4611 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-config-volume\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.411849 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgpz2\" (UniqueName: \"kubernetes.io/projected/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-kube-api-access-sgpz2\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.425537 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-secret-volume\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.435199 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgpz2\" (UniqueName: \"kubernetes.io/projected/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-kube-api-access-sgpz2\") pod \"collect-profiles-29319255-7fzk5\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.520273 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:00 crc kubenswrapper[4611]: I0929 14:15:00.995936 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5"] Sep 29 14:15:01 crc kubenswrapper[4611]: I0929 14:15:01.320762 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" event={"ID":"0c4754e0-7cd3-4fe6-8f5f-2cd209764619","Type":"ContainerStarted","Data":"6d9922a486d6b65eb39cf4774ec2f003ae7e044189c9c31f17a584a272fc5bfe"} Sep 29 14:15:01 crc kubenswrapper[4611]: I0929 14:15:01.321076 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" event={"ID":"0c4754e0-7cd3-4fe6-8f5f-2cd209764619","Type":"ContainerStarted","Data":"c04be318047bc9108597f0fb63ff33678eaf73ebd3d112d1836b211e077e0797"} Sep 29 14:15:01 crc kubenswrapper[4611]: I0929 14:15:01.344167 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" podStartSLOduration=1.344147482 podStartE2EDuration="1.344147482s" podCreationTimestamp="2025-09-29 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:15:01.338792727 +0000 UTC m=+5688.230312333" watchObservedRunningTime="2025-09-29 14:15:01.344147482 +0000 UTC m=+5688.235667088" Sep 29 14:15:02 crc kubenswrapper[4611]: I0929 14:15:02.333574 4611 generic.go:334] "Generic (PLEG): container finished" podID="0c4754e0-7cd3-4fe6-8f5f-2cd209764619" containerID="6d9922a486d6b65eb39cf4774ec2f003ae7e044189c9c31f17a584a272fc5bfe" exitCode=0 Sep 29 
14:15:02 crc kubenswrapper[4611]: I0929 14:15:02.333664 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" event={"ID":"0c4754e0-7cd3-4fe6-8f5f-2cd209764619","Type":"ContainerDied","Data":"6d9922a486d6b65eb39cf4774ec2f003ae7e044189c9c31f17a584a272fc5bfe"} Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.709804 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.783600 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-secret-volume\") pod \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.783808 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-config-volume\") pod \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.783863 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgpz2\" (UniqueName: \"kubernetes.io/projected/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-kube-api-access-sgpz2\") pod \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\" (UID: \"0c4754e0-7cd3-4fe6-8f5f-2cd209764619\") " Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.784510 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-config-volume" (OuterVolumeSpecName: "config-volume") pod "0c4754e0-7cd3-4fe6-8f5f-2cd209764619" (UID: "0c4754e0-7cd3-4fe6-8f5f-2cd209764619"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.790709 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-kube-api-access-sgpz2" (OuterVolumeSpecName: "kube-api-access-sgpz2") pod "0c4754e0-7cd3-4fe6-8f5f-2cd209764619" (UID: "0c4754e0-7cd3-4fe6-8f5f-2cd209764619"). InnerVolumeSpecName "kube-api-access-sgpz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.791271 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0c4754e0-7cd3-4fe6-8f5f-2cd209764619" (UID: "0c4754e0-7cd3-4fe6-8f5f-2cd209764619"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.886487 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgpz2\" (UniqueName: \"kubernetes.io/projected/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-kube-api-access-sgpz2\") on node \"crc\" DevicePath \"\"" Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.886517 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:15:03 crc kubenswrapper[4611]: I0929 14:15:03.886527 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c4754e0-7cd3-4fe6-8f5f-2cd209764619-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:15:04 crc kubenswrapper[4611]: I0929 14:15:04.356322 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" event={"ID":"0c4754e0-7cd3-4fe6-8f5f-2cd209764619","Type":"ContainerDied","Data":"c04be318047bc9108597f0fb63ff33678eaf73ebd3d112d1836b211e077e0797"} Sep 29 14:15:04 crc kubenswrapper[4611]: I0929 14:15:04.356388 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-7fzk5" Sep 29 14:15:04 crc kubenswrapper[4611]: I0929 14:15:04.357057 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c04be318047bc9108597f0fb63ff33678eaf73ebd3d112d1836b211e077e0797" Sep 29 14:15:04 crc kubenswrapper[4611]: I0929 14:15:04.530183 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl"] Sep 29 14:15:04 crc kubenswrapper[4611]: I0929 14:15:04.530419 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319210-897tl"] Sep 29 14:15:05 crc kubenswrapper[4611]: I0929 14:15:05.795211 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d52f2b21-2059-4956-a0cb-1ddcab3822d8" path="/var/lib/kubelet/pods/d52f2b21-2059-4956-a0cb-1ddcab3822d8/volumes" Sep 29 14:15:39 crc kubenswrapper[4611]: I0929 14:15:39.416819 4611 scope.go:117] "RemoveContainer" containerID="d5a25dd64f0756bf15f8d00b7dabcc3a0679300c3398c4740622fd3a0c26ee5e" Sep 29 14:16:04 crc kubenswrapper[4611]: I0929 14:16:04.628254 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:16:04 crc kubenswrapper[4611]: I0929 14:16:04.629529 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:16:34 crc kubenswrapper[4611]: I0929 14:16:34.629062 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Sep 29 14:16:34 crc kubenswrapper[4611]: I0929 14:16:34.629679 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:17:04 crc kubenswrapper[4611]: I0929 14:17:04.628390 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:17:04 crc kubenswrapper[4611]: I0929 14:17:04.628972 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:17:04 crc kubenswrapper[4611]: I0929 14:17:04.629020 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:17:04 crc kubenswrapper[4611]: I0929 14:17:04.629764 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:17:04 crc kubenswrapper[4611]: I0929 14:17:04.629810 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" gracePeriod=600 Sep 29 14:17:04 crc kubenswrapper[4611]: E0929 14:17:04.790071 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:17:05 crc kubenswrapper[4611]: I0929 14:17:05.625255 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" exitCode=0 Sep 29 14:17:05 crc kubenswrapper[4611]: I0929 14:17:05.625296 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb"} Sep 29 14:17:05 crc kubenswrapper[4611]: I0929 14:17:05.625341 4611 scope.go:117] "RemoveContainer" containerID="98b742a9eb7639247c36ce459a7072dcf8e7ea94fc6539d72b442103a328cbb1" Sep 29 14:17:05 crc kubenswrapper[4611]: I0929 14:17:05.626035 4611 scope.go:117] "RemoveContainer" 
containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:17:05 crc kubenswrapper[4611]: E0929 14:17:05.626392 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:17:19 crc kubenswrapper[4611]: I0929 14:17:19.737308 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:17:19 crc kubenswrapper[4611]: E0929 14:17:19.739128 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:17:30 crc kubenswrapper[4611]: I0929 14:17:30.736314 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:17:30 crc kubenswrapper[4611]: E0929 14:17:30.737169 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:17:43 crc kubenswrapper[4611]: I0929 14:17:43.738086 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:17:43 crc kubenswrapper[4611]: E0929 14:17:43.739339 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:17:56 crc kubenswrapper[4611]: I0929 14:17:56.737110 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:17:56 crc kubenswrapper[4611]: E0929 14:17:56.737891 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:18:08 crc kubenswrapper[4611]: I0929 14:18:08.737186 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:18:08 crc kubenswrapper[4611]: E0929 14:18:08.738254 4611 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:18:21 crc kubenswrapper[4611]: I0929 14:18:21.736573 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:18:21 crc kubenswrapper[4611]: E0929 14:18:21.738129 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:18:34 crc kubenswrapper[4611]: I0929 14:18:34.736008 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:18:34 crc kubenswrapper[4611]: E0929 14:18:34.737648 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:18:46 crc kubenswrapper[4611]: I0929 14:18:46.737201 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:18:46 crc kubenswrapper[4611]: E0929 14:18:46.737972 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:18:58 crc kubenswrapper[4611]: I0929 14:18:58.736154 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:18:58 crc kubenswrapper[4611]: E0929 14:18:58.737080 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:19:10 crc kubenswrapper[4611]: I0929 14:19:10.737124 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:19:10 crc kubenswrapper[4611]: E0929 14:19:10.737965 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:19:24 crc kubenswrapper[4611]: I0929 14:19:24.736421 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:19:24 crc kubenswrapper[4611]: E0929 14:19:24.737235 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:19:37 crc kubenswrapper[4611]: I0929 14:19:37.737066 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:19:37 crc kubenswrapper[4611]: E0929 14:19:37.738182 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:19:49 crc kubenswrapper[4611]: I0929 14:19:49.736615 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:19:49 crc kubenswrapper[4611]: E0929 14:19:49.737504 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:20:04 crc kubenswrapper[4611]: I0929 14:20:04.736259 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:20:04 crc kubenswrapper[4611]: E0929 14:20:04.737174 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:20:15 crc kubenswrapper[4611]: I0929 14:20:15.737355 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:20:15 crc kubenswrapper[4611]: E0929 14:20:15.739308 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" 
podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:20:26 crc kubenswrapper[4611]: I0929 14:20:26.737370 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:20:26 crc kubenswrapper[4611]: E0929 14:20:26.738378 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:20:41 crc kubenswrapper[4611]: I0929 14:20:41.737129 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:20:41 crc kubenswrapper[4611]: E0929 14:20:41.738621 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:20:53 crc kubenswrapper[4611]: I0929 14:20:53.742967 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:20:53 crc kubenswrapper[4611]: E0929 14:20:53.743964 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:21:08 crc kubenswrapper[4611]: I0929 14:21:08.736839 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:21:08 crc kubenswrapper[4611]: E0929 14:21:08.737841 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:21:23 crc kubenswrapper[4611]: I0929 14:21:23.743557 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:21:23 crc kubenswrapper[4611]: E0929 14:21:23.744789 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:21:35 crc kubenswrapper[4611]: I0929 14:21:35.736837 4611 scope.go:117] "RemoveContainer" 
containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:21:35 crc kubenswrapper[4611]: E0929 14:21:35.738054 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:21:48 crc kubenswrapper[4611]: I0929 14:21:48.736858 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:21:48 crc kubenswrapper[4611]: E0929 14:21:48.737755 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:22:02 crc kubenswrapper[4611]: I0929 14:22:02.736969 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:22:02 crc kubenswrapper[4611]: E0929 14:22:02.737800 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:22:16 crc kubenswrapper[4611]: I0929 14:22:16.736698 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:22:17 crc kubenswrapper[4611]: I0929 14:22:17.812725 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"e52f81ba8800d9cb3f8183fe3245e2cbe670ddde36b654f916731c15b981e641"} Sep 29 14:22:34 crc kubenswrapper[4611]: E0929 14:22:34.557396 4611 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.74:60236->38.102.83.74:46265: write tcp 38.102.83.74:60236->38.102.83.74:46265: write: broken pipe Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.691181 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8q7bj"] Sep 29 14:22:35 crc kubenswrapper[4611]: E0929 14:22:35.692854 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c4754e0-7cd3-4fe6-8f5f-2cd209764619" containerName="collect-profiles" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.693001 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c4754e0-7cd3-4fe6-8f5f-2cd209764619" containerName="collect-profiles" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.693354 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c4754e0-7cd3-4fe6-8f5f-2cd209764619" containerName="collect-profiles" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.695492 4611 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.710482 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8q7bj"] Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.823936 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-utilities\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.824042 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72bkq\" (UniqueName: \"kubernetes.io/projected/89d0007d-66cd-4e92-9f76-34a46a00dbc8-kube-api-access-72bkq\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.824189 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-catalog-content\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.926756 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72bkq\" (UniqueName: \"kubernetes.io/projected/89d0007d-66cd-4e92-9f76-34a46a00dbc8-kube-api-access-72bkq\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.927294 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-catalog-content\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.927768 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-catalog-content\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.927861 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-utilities\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.928112 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-utilities\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:35 crc kubenswrapper[4611]: I0929 14:22:35.960295 4611 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72bkq\" (UniqueName: \"kubernetes.io/projected/89d0007d-66cd-4e92-9f76-34a46a00dbc8-kube-api-access-72bkq\") pod \"certified-operators-8q7bj\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:36 crc kubenswrapper[4611]: I0929 14:22:36.014146 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:36 crc kubenswrapper[4611]: W0929 14:22:36.793509 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89d0007d_66cd_4e92_9f76_34a46a00dbc8.slice/crio-0363f70321ea0141aaccc601ea7e064df17e8d3a85132907dc763b773a725668 WatchSource:0}: Error finding container 0363f70321ea0141aaccc601ea7e064df17e8d3a85132907dc763b773a725668: Status 404 returned error can't find the container with id 0363f70321ea0141aaccc601ea7e064df17e8d3a85132907dc763b773a725668 Sep 29 14:22:36 crc kubenswrapper[4611]: I0929 14:22:36.796539 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8q7bj"] Sep 29 14:22:37 crc kubenswrapper[4611]: I0929 14:22:37.000958 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerStarted","Data":"0363f70321ea0141aaccc601ea7e064df17e8d3a85132907dc763b773a725668"} Sep 29 14:22:38 crc kubenswrapper[4611]: I0929 14:22:38.016716 4611 generic.go:334] "Generic (PLEG): container finished" podID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerID="c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a" exitCode=0 Sep 29 14:22:38 crc kubenswrapper[4611]: I0929 14:22:38.016783 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerDied","Data":"c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a"} Sep 29 14:22:38 crc kubenswrapper[4611]: I0929 14:22:38.020284 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:22:40 crc kubenswrapper[4611]: I0929 14:22:40.035927 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerStarted","Data":"a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63"} Sep 29 14:22:42 crc kubenswrapper[4611]: I0929 14:22:42.064719 4611 generic.go:334] "Generic (PLEG): container finished" podID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerID="a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63" exitCode=0 Sep 29 14:22:42 crc kubenswrapper[4611]: I0929 14:22:42.064807 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerDied","Data":"a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63"} Sep 29 14:22:43 crc kubenswrapper[4611]: I0929 14:22:43.077056 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerStarted","Data":"e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a"} Sep 29 14:22:43 crc 
kubenswrapper[4611]: I0929 14:22:43.105700 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8q7bj" podStartSLOduration=3.63134123 podStartE2EDuration="8.10567957s" podCreationTimestamp="2025-09-29 14:22:35 +0000 UTC" firstStartedPulling="2025-09-29 14:22:38.019890838 +0000 UTC m=+6144.911410474" lastFinishedPulling="2025-09-29 14:22:42.494229208 +0000 UTC m=+6149.385748814" observedRunningTime="2025-09-29 14:22:43.101261083 +0000 UTC m=+6149.992780689" watchObservedRunningTime="2025-09-29 14:22:43.10567957 +0000 UTC m=+6149.997199196" Sep 29 14:22:46 crc kubenswrapper[4611]: I0929 14:22:46.014935 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:46 crc kubenswrapper[4611]: I0929 14:22:46.015912 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:47 crc kubenswrapper[4611]: I0929 14:22:47.080350 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-8q7bj" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="registry-server" probeResult="failure" output=< Sep 29 14:22:47 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:22:47 crc kubenswrapper[4611]: > Sep 29 14:22:56 crc kubenswrapper[4611]: I0929 14:22:56.114715 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:56 crc kubenswrapper[4611]: I0929 14:22:56.212833 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:56 crc kubenswrapper[4611]: I0929 14:22:56.371161 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8q7bj"] Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.235695 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8q7bj" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="registry-server" containerID="cri-o://e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a" gracePeriod=2 Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.752538 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.902756 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-utilities\") pod \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.903118 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-catalog-content\") pod \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.903192 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72bkq\" (UniqueName: \"kubernetes.io/projected/89d0007d-66cd-4e92-9f76-34a46a00dbc8-kube-api-access-72bkq\") pod \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\" (UID: \"89d0007d-66cd-4e92-9f76-34a46a00dbc8\") " Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.905122 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-utilities" (OuterVolumeSpecName: "utilities") pod "89d0007d-66cd-4e92-9f76-34a46a00dbc8" (UID: "89d0007d-66cd-4e92-9f76-34a46a00dbc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.909710 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.914093 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89d0007d-66cd-4e92-9f76-34a46a00dbc8-kube-api-access-72bkq" (OuterVolumeSpecName: "kube-api-access-72bkq") pod "89d0007d-66cd-4e92-9f76-34a46a00dbc8" (UID: "89d0007d-66cd-4e92-9f76-34a46a00dbc8"). InnerVolumeSpecName "kube-api-access-72bkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:22:57 crc kubenswrapper[4611]: I0929 14:22:57.950150 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89d0007d-66cd-4e92-9f76-34a46a00dbc8" (UID: "89d0007d-66cd-4e92-9f76-34a46a00dbc8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.011709 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89d0007d-66cd-4e92-9f76-34a46a00dbc8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.011746 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72bkq\" (UniqueName: \"kubernetes.io/projected/89d0007d-66cd-4e92-9f76-34a46a00dbc8-kube-api-access-72bkq\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.247051 4611 generic.go:334] "Generic (PLEG): container finished" podID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerID="e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a" exitCode=0 Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.247104 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerDied","Data":"e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a"} Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.247148 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8q7bj" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.247185 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q7bj" event={"ID":"89d0007d-66cd-4e92-9f76-34a46a00dbc8","Type":"ContainerDied","Data":"0363f70321ea0141aaccc601ea7e064df17e8d3a85132907dc763b773a725668"} Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.247226 4611 scope.go:117] "RemoveContainer" containerID="e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.269343 4611 scope.go:117] "RemoveContainer" containerID="a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.291959 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8q7bj"] Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.302668 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8q7bj"] Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.308336 4611 scope.go:117] "RemoveContainer" containerID="c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.341786 4611 scope.go:117] "RemoveContainer" containerID="e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a" Sep 29 14:22:58 crc kubenswrapper[4611]: E0929 14:22:58.342449 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a\": container with ID starting with e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a not found: ID does not exist" containerID="e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.342488 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a"} err="failed to get container status 
\"e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a\": rpc error: code = NotFound desc = could not find container \"e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a\": container with ID starting with e1ca58b1574c3022beca3f403c79d58c72298bd0bdf98fcec54dad380a48384a not found: ID does not exist" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.342514 4611 scope.go:117] "RemoveContainer" containerID="a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63" Sep 29 14:22:58 crc kubenswrapper[4611]: E0929 14:22:58.342907 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63\": container with ID starting with a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63 not found: ID does not exist" containerID="a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.342944 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63"} err="failed to get container status \"a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63\": rpc error: code = NotFound desc = could not find container \"a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63\": container with ID starting with a12a7809576e6e6804f90268168ca84c65708c4154108dfc7ec36bd2f0297e63 not found: ID does not exist" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.342974 4611 scope.go:117] "RemoveContainer" containerID="c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a" Sep 29 14:22:58 crc kubenswrapper[4611]: E0929 14:22:58.345932 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a\": container with ID starting with c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a not found: ID does not exist" containerID="c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a" Sep 29 14:22:58 crc kubenswrapper[4611]: I0929 14:22:58.346004 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a"} err="failed to get container status \"c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a\": rpc error: code = NotFound desc = could not find container \"c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a\": container with ID starting with c08b600505b11e316ae80e1ba41137d223d726542c719c59a57b6e2a84b3398a not found: ID does not exist" Sep 29 14:22:59 crc kubenswrapper[4611]: I0929 14:22:59.754921 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" path="/var/lib/kubelet/pods/89d0007d-66cd-4e92-9f76-34a46a00dbc8/volumes" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.603638 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t6hnb"] Sep 29 14:23:09 crc kubenswrapper[4611]: E0929 14:23:09.604564 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="extract-utilities" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.604610 4611 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="extract-utilities" Sep 29 14:23:09 crc kubenswrapper[4611]: E0929 14:23:09.604654 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="registry-server" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.604662 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="registry-server" Sep 29 14:23:09 crc kubenswrapper[4611]: E0929 14:23:09.604691 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="extract-content" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.604698 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="extract-content" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.606290 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="89d0007d-66cd-4e92-9f76-34a46a00dbc8" containerName="registry-server" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.607941 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.628455 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t6hnb"] Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.743404 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-catalog-content\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.743905 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-utilities\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.743953 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85pzw\" (UniqueName: \"kubernetes.io/projected/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-kube-api-access-85pzw\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.845590 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-catalog-content\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.845736 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-utilities\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.845767 4611 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-85pzw\" (UniqueName: \"kubernetes.io/projected/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-kube-api-access-85pzw\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.846095 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-catalog-content\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.846369 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-utilities\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.868439 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85pzw\" (UniqueName: \"kubernetes.io/projected/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-kube-api-access-85pzw\") pod \"redhat-operators-t6hnb\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:09 crc kubenswrapper[4611]: I0929 14:23:09.929391 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:10 crc kubenswrapper[4611]: I0929 14:23:10.448945 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t6hnb"] Sep 29 14:23:11 crc kubenswrapper[4611]: I0929 14:23:11.381667 4611 generic.go:334] "Generic (PLEG): container finished" podID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerID="a0bc1d7ae64d432ee45d034f66743e3529138adbd1e0700154a078ae03ac635b" exitCode=0 Sep 29 14:23:11 crc kubenswrapper[4611]: I0929 14:23:11.381731 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerDied","Data":"a0bc1d7ae64d432ee45d034f66743e3529138adbd1e0700154a078ae03ac635b"} Sep 29 14:23:11 crc kubenswrapper[4611]: I0929 14:23:11.381904 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerStarted","Data":"0e946a847c1a20e145ef3f596b79e2958605eb1bd4248c425fdc99733069147e"} Sep 29 14:23:13 crc kubenswrapper[4611]: I0929 14:23:13.402493 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerStarted","Data":"1c5898af792617b5025d87849a699fa7f7d94d0ab598f66adbea79e4b789bc75"} Sep 29 14:23:17 crc kubenswrapper[4611]: I0929 14:23:17.444521 4611 generic.go:334] "Generic (PLEG): container finished" podID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerID="1c5898af792617b5025d87849a699fa7f7d94d0ab598f66adbea79e4b789bc75" exitCode=0 Sep 29 14:23:17 crc kubenswrapper[4611]: I0929 14:23:17.444616 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" 
event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerDied","Data":"1c5898af792617b5025d87849a699fa7f7d94d0ab598f66adbea79e4b789bc75"} Sep 29 14:23:18 crc kubenswrapper[4611]: I0929 14:23:18.456842 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerStarted","Data":"87c68ea9d85fb2223325dd818065a38e47ecc11f1a2eadf74039a89462064761"} Sep 29 14:23:18 crc kubenswrapper[4611]: I0929 14:23:18.478316 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t6hnb" podStartSLOduration=2.938007913 podStartE2EDuration="9.478297308s" podCreationTimestamp="2025-09-29 14:23:09 +0000 UTC" firstStartedPulling="2025-09-29 14:23:11.385783307 +0000 UTC m=+6178.277302913" lastFinishedPulling="2025-09-29 14:23:17.926072682 +0000 UTC m=+6184.817592308" observedRunningTime="2025-09-29 14:23:18.473552391 +0000 UTC m=+6185.365072017" watchObservedRunningTime="2025-09-29 14:23:18.478297308 +0000 UTC m=+6185.369816924" Sep 29 14:23:19 crc kubenswrapper[4611]: I0929 14:23:19.931698 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:19 crc kubenswrapper[4611]: I0929 14:23:19.932548 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:20 crc kubenswrapper[4611]: I0929 14:23:20.985453 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t6hnb" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" probeResult="failure" output=< Sep 29 14:23:20 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:23:20 crc kubenswrapper[4611]: > Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.328977 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xcbml"] Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.343066 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xcbml"] Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.343415 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.438982 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-utilities\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.439113 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-catalog-content\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.439183 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p479s\" (UniqueName: \"kubernetes.io/projected/e6716442-1154-44a6-893a-931a08676086-kube-api-access-p479s\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.540689 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-catalog-content\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.540786 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p479s\" (UniqueName: \"kubernetes.io/projected/e6716442-1154-44a6-893a-931a08676086-kube-api-access-p479s\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.540859 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-utilities\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.541327 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-utilities\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.541528 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-catalog-content\") pod \"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.565721 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p479s\" (UniqueName: \"kubernetes.io/projected/e6716442-1154-44a6-893a-931a08676086-kube-api-access-p479s\") pod 
\"community-operators-xcbml\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:29 crc kubenswrapper[4611]: I0929 14:23:29.678170 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:30 crc kubenswrapper[4611]: I0929 14:23:30.292108 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xcbml"] Sep 29 14:23:30 crc kubenswrapper[4611]: W0929 14:23:30.301148 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6716442_1154_44a6_893a_931a08676086.slice/crio-49cb3487a6e3d41fef63d006f86f3773c902f4b1d07f16f8f4dc82b96af12df3 WatchSource:0}: Error finding container 49cb3487a6e3d41fef63d006f86f3773c902f4b1d07f16f8f4dc82b96af12df3: Status 404 returned error can't find the container with id 49cb3487a6e3d41fef63d006f86f3773c902f4b1d07f16f8f4dc82b96af12df3 Sep 29 14:23:30 crc kubenswrapper[4611]: I0929 14:23:30.565834 4611 generic.go:334] "Generic (PLEG): container finished" podID="e6716442-1154-44a6-893a-931a08676086" containerID="54a43191b9fb7e7e612f668019c00a183ed972bc5f90879a276818d6d97cf101" exitCode=0 Sep 29 14:23:30 crc kubenswrapper[4611]: I0929 14:23:30.565899 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerDied","Data":"54a43191b9fb7e7e612f668019c00a183ed972bc5f90879a276818d6d97cf101"} Sep 29 14:23:30 crc kubenswrapper[4611]: I0929 14:23:30.566104 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerStarted","Data":"49cb3487a6e3d41fef63d006f86f3773c902f4b1d07f16f8f4dc82b96af12df3"} Sep 29 14:23:31 crc kubenswrapper[4611]: I0929 14:23:31.001141 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t6hnb" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" probeResult="failure" output=< Sep 29 14:23:31 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:23:31 crc kubenswrapper[4611]: > Sep 29 14:23:32 crc kubenswrapper[4611]: I0929 14:23:32.588765 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerStarted","Data":"ed097833ae1c14308495c08fee84aa5e1118f84229103d8d8cfb2cf6ffc8a36a"} Sep 29 14:23:33 crc kubenswrapper[4611]: I0929 14:23:33.599552 4611 generic.go:334] "Generic (PLEG): container finished" podID="e6716442-1154-44a6-893a-931a08676086" containerID="ed097833ae1c14308495c08fee84aa5e1118f84229103d8d8cfb2cf6ffc8a36a" exitCode=0 Sep 29 14:23:33 crc kubenswrapper[4611]: I0929 14:23:33.599850 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerDied","Data":"ed097833ae1c14308495c08fee84aa5e1118f84229103d8d8cfb2cf6ffc8a36a"} Sep 29 14:23:35 crc kubenswrapper[4611]: I0929 14:23:35.621575 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" 
event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerStarted","Data":"e056621ae4d84e43c4a336ed0bc4f54fd94eee2ce75e46aa8e5f3dc8afb17b73"} Sep 29 14:23:35 crc kubenswrapper[4611]: I0929 14:23:35.639101 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xcbml" podStartSLOduration=2.866349309 podStartE2EDuration="6.639084546s" podCreationTimestamp="2025-09-29 14:23:29 +0000 UTC" firstStartedPulling="2025-09-29 14:23:30.568975755 +0000 UTC m=+6197.460495371" lastFinishedPulling="2025-09-29 14:23:34.341710992 +0000 UTC m=+6201.233230608" observedRunningTime="2025-09-29 14:23:35.638152049 +0000 UTC m=+6202.529671645" watchObservedRunningTime="2025-09-29 14:23:35.639084546 +0000 UTC m=+6202.530604152" Sep 29 14:23:39 crc kubenswrapper[4611]: I0929 14:23:39.678927 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:39 crc kubenswrapper[4611]: I0929 14:23:39.679386 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:40 crc kubenswrapper[4611]: I0929 14:23:40.732558 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-xcbml" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="registry-server" probeResult="failure" output=< Sep 29 14:23:40 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:23:40 crc kubenswrapper[4611]: > Sep 29 14:23:40 crc kubenswrapper[4611]: I0929 14:23:40.976881 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t6hnb" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" probeResult="failure" output=< Sep 29 14:23:40 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:23:40 crc kubenswrapper[4611]: > Sep 29 14:23:49 crc kubenswrapper[4611]: I0929 14:23:49.752038 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:49 crc kubenswrapper[4611]: I0929 14:23:49.816113 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:49 crc kubenswrapper[4611]: I0929 14:23:49.991332 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xcbml"] Sep 29 14:23:49 crc kubenswrapper[4611]: I0929 14:23:49.992828 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:50 crc kubenswrapper[4611]: I0929 14:23:50.042862 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:51 crc kubenswrapper[4611]: I0929 14:23:51.766335 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xcbml" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="registry-server" containerID="cri-o://e056621ae4d84e43c4a336ed0bc4f54fd94eee2ce75e46aa8e5f3dc8afb17b73" gracePeriod=2 Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.392298 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t6hnb"] Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.392906 4611 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t6hnb" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" containerID="cri-o://87c68ea9d85fb2223325dd818065a38e47ecc11f1a2eadf74039a89462064761" gracePeriod=2 Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.780896 4611 generic.go:334] "Generic (PLEG): container finished" podID="e6716442-1154-44a6-893a-931a08676086" containerID="e056621ae4d84e43c4a336ed0bc4f54fd94eee2ce75e46aa8e5f3dc8afb17b73" exitCode=0 Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.780970 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerDied","Data":"e056621ae4d84e43c4a336ed0bc4f54fd94eee2ce75e46aa8e5f3dc8afb17b73"} Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.781018 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcbml" event={"ID":"e6716442-1154-44a6-893a-931a08676086","Type":"ContainerDied","Data":"49cb3487a6e3d41fef63d006f86f3773c902f4b1d07f16f8f4dc82b96af12df3"} Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.781034 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49cb3487a6e3d41fef63d006f86f3773c902f4b1d07f16f8f4dc82b96af12df3" Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.783727 4611 generic.go:334] "Generic (PLEG): container finished" podID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerID="87c68ea9d85fb2223325dd818065a38e47ecc11f1a2eadf74039a89462064761" exitCode=0 Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.783764 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerDied","Data":"87c68ea9d85fb2223325dd818065a38e47ecc11f1a2eadf74039a89462064761"} Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.800820 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.878258 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.944895 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p479s\" (UniqueName: \"kubernetes.io/projected/e6716442-1154-44a6-893a-931a08676086-kube-api-access-p479s\") pod \"e6716442-1154-44a6-893a-931a08676086\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.944969 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-utilities\") pod \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.945032 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-utilities\") pod \"e6716442-1154-44a6-893a-931a08676086\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.945072 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-catalog-content\") pod \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.945101 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85pzw\" (UniqueName: \"kubernetes.io/projected/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-kube-api-access-85pzw\") pod \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\" (UID: \"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b\") " Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.945128 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-catalog-content\") pod \"e6716442-1154-44a6-893a-931a08676086\" (UID: \"e6716442-1154-44a6-893a-931a08676086\") " Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.946479 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-utilities" (OuterVolumeSpecName: "utilities") pod "e6716442-1154-44a6-893a-931a08676086" (UID: "e6716442-1154-44a6-893a-931a08676086"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.946747 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-utilities" (OuterVolumeSpecName: "utilities") pod "b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" (UID: "b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.967070 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6716442-1154-44a6-893a-931a08676086-kube-api-access-p479s" (OuterVolumeSpecName: "kube-api-access-p479s") pod "e6716442-1154-44a6-893a-931a08676086" (UID: "e6716442-1154-44a6-893a-931a08676086"). InnerVolumeSpecName "kube-api-access-p479s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:23:52 crc kubenswrapper[4611]: I0929 14:23:52.973830 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-kube-api-access-85pzw" (OuterVolumeSpecName: "kube-api-access-85pzw") pod "b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" (UID: "b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b"). InnerVolumeSpecName "kube-api-access-85pzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.011403 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6716442-1154-44a6-893a-931a08676086" (UID: "e6716442-1154-44a6-893a-931a08676086"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.047967 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.048021 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85pzw\" (UniqueName: \"kubernetes.io/projected/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-kube-api-access-85pzw\") on node \"crc\" DevicePath \"\"" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.048584 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6716442-1154-44a6-893a-931a08676086-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.048606 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p479s\" (UniqueName: \"kubernetes.io/projected/e6716442-1154-44a6-893a-931a08676086-kube-api-access-p479s\") on node \"crc\" DevicePath \"\"" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.048639 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.048733 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" (UID: "b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.150558 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.795849 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xcbml" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.796675 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t6hnb" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.797086 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6hnb" event={"ID":"b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b","Type":"ContainerDied","Data":"0e946a847c1a20e145ef3f596b79e2958605eb1bd4248c425fdc99733069147e"} Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.797126 4611 scope.go:117] "RemoveContainer" containerID="87c68ea9d85fb2223325dd818065a38e47ecc11f1a2eadf74039a89462064761" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.826617 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t6hnb"] Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.839617 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t6hnb"] Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.840494 4611 scope.go:117] "RemoveContainer" containerID="1c5898af792617b5025d87849a699fa7f7d94d0ab598f66adbea79e4b789bc75" Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.850248 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xcbml"] Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.858729 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xcbml"] Sep 29 14:23:53 crc kubenswrapper[4611]: I0929 14:23:53.868834 4611 scope.go:117] "RemoveContainer" containerID="a0bc1d7ae64d432ee45d034f66743e3529138adbd1e0700154a078ae03ac635b" Sep 29 14:23:55 crc kubenswrapper[4611]: I0929 14:23:55.749312 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" path="/var/lib/kubelet/pods/b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b/volumes" Sep 29 14:23:55 crc kubenswrapper[4611]: I0929 14:23:55.751861 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6716442-1154-44a6-893a-931a08676086" path="/var/lib/kubelet/pods/e6716442-1154-44a6-893a-931a08676086/volumes" Sep 29 14:24:34 crc kubenswrapper[4611]: I0929 14:24:34.628107 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:24:34 crc kubenswrapper[4611]: I0929 14:24:34.628673 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:25:04 crc kubenswrapper[4611]: I0929 14:25:04.628984 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:25:04 crc kubenswrapper[4611]: I0929 14:25:04.629469 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.628645 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.629139 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.629186 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.629893 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e52f81ba8800d9cb3f8183fe3245e2cbe670ddde36b654f916731c15b981e641"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.629943 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://e52f81ba8800d9cb3f8183fe3245e2cbe670ddde36b654f916731c15b981e641" gracePeriod=600 Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.875444 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="e52f81ba8800d9cb3f8183fe3245e2cbe670ddde36b654f916731c15b981e641" exitCode=0 Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.875508 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"e52f81ba8800d9cb3f8183fe3245e2cbe670ddde36b654f916731c15b981e641"} Sep 29 14:25:34 crc kubenswrapper[4611]: I0929 14:25:34.875854 4611 scope.go:117] "RemoveContainer" containerID="809cdc34468328548ede13e975efef0b9806c084b5c11c999605e67b119e08bb" Sep 29 14:25:35 crc kubenswrapper[4611]: I0929 14:25:35.889359 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7"} Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.638864 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc89"] Sep 29 14:27:52 crc kubenswrapper[4611]: E0929 14:27:52.639877 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="extract-utilities" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.639892 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="extract-utilities" Sep 29 
14:27:52 crc kubenswrapper[4611]: E0929 14:27:52.639912 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="registry-server" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.639919 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="registry-server" Sep 29 14:27:52 crc kubenswrapper[4611]: E0929 14:27:52.639935 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="extract-utilities" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.639943 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="extract-utilities" Sep 29 14:27:52 crc kubenswrapper[4611]: E0929 14:27:52.639958 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="extract-content" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.639965 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="extract-content" Sep 29 14:27:52 crc kubenswrapper[4611]: E0929 14:27:52.639980 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.640001 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" Sep 29 14:27:52 crc kubenswrapper[4611]: E0929 14:27:52.640019 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="extract-content" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.640026 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="extract-content" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.640257 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b95c9999-c0e1-43a7-8b9e-7ffddbd77b8b" containerName="registry-server" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.640274 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6716442-1154-44a6-893a-931a08676086" containerName="registry-server" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.642346 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.652980 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc89"] Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.774049 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-utilities\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.774206 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5msml\" (UniqueName: \"kubernetes.io/projected/dc428306-6103-45c9-9036-e41314027659-kube-api-access-5msml\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.774257 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-catalog-content\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.875439 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5msml\" (UniqueName: \"kubernetes.io/projected/dc428306-6103-45c9-9036-e41314027659-kube-api-access-5msml\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.875715 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-catalog-content\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.875984 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-utilities\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.876437 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-utilities\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.876979 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-catalog-content\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.906370 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5msml\" (UniqueName: \"kubernetes.io/projected/dc428306-6103-45c9-9036-e41314027659-kube-api-access-5msml\") pod \"redhat-marketplace-4bc89\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:52 crc kubenswrapper[4611]: I0929 14:27:52.965611 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:27:53 crc kubenswrapper[4611]: I0929 14:27:53.577308 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc89"] Sep 29 14:27:54 crc kubenswrapper[4611]: I0929 14:27:54.292900 4611 generic.go:334] "Generic (PLEG): container finished" podID="dc428306-6103-45c9-9036-e41314027659" containerID="912e8ceab1cbf8008998acf74b1ffe8f1b46f9596854286c4b0bc0099c5168ca" exitCode=0 Sep 29 14:27:54 crc kubenswrapper[4611]: I0929 14:27:54.292991 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerDied","Data":"912e8ceab1cbf8008998acf74b1ffe8f1b46f9596854286c4b0bc0099c5168ca"} Sep 29 14:27:54 crc kubenswrapper[4611]: I0929 14:27:54.293187 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerStarted","Data":"161b90aa945173248b655af68f279bb57d9e96249b696bdd8e0324e62378c106"} Sep 29 14:27:54 crc kubenswrapper[4611]: I0929 14:27:54.295435 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:27:56 crc kubenswrapper[4611]: I0929 14:27:56.319897 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerStarted","Data":"c243ebd97d325d2a378e4a5f02dc503252c177e86dc1c4b734952c504421a39c"} Sep 29 14:27:57 crc kubenswrapper[4611]: I0929 14:27:57.365070 4611 generic.go:334] "Generic (PLEG): container finished" podID="dc428306-6103-45c9-9036-e41314027659" containerID="c243ebd97d325d2a378e4a5f02dc503252c177e86dc1c4b734952c504421a39c" exitCode=0 Sep 29 14:27:57 crc kubenswrapper[4611]: I0929 14:27:57.366184 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerDied","Data":"c243ebd97d325d2a378e4a5f02dc503252c177e86dc1c4b734952c504421a39c"} Sep 29 14:27:59 crc kubenswrapper[4611]: I0929 14:27:59.388828 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerStarted","Data":"958efa7f39d05b4a638a6f4b1714959398db7a9c7cda3d66f8f1a9f125d2a6e9"} Sep 29 14:27:59 crc kubenswrapper[4611]: I0929 14:27:59.408126 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4bc89" podStartSLOduration=3.450984225 podStartE2EDuration="7.408108278s" podCreationTimestamp="2025-09-29 14:27:52 +0000 UTC" firstStartedPulling="2025-09-29 14:27:54.295030758 +0000 UTC m=+6461.186550364" lastFinishedPulling="2025-09-29 14:27:58.252154801 +0000 UTC m=+6465.143674417" observedRunningTime="2025-09-29 14:27:59.404089732 +0000 UTC m=+6466.295609348" watchObservedRunningTime="2025-09-29 14:27:59.408108278 +0000 UTC 
m=+6466.299627904" Sep 29 14:28:02 crc kubenswrapper[4611]: I0929 14:28:02.966684 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:28:02 crc kubenswrapper[4611]: I0929 14:28:02.968603 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:28:03 crc kubenswrapper[4611]: I0929 14:28:03.032703 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:28:03 crc kubenswrapper[4611]: I0929 14:28:03.513017 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:28:03 crc kubenswrapper[4611]: I0929 14:28:03.563329 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc89"] Sep 29 14:28:04 crc kubenswrapper[4611]: I0929 14:28:04.628947 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:28:04 crc kubenswrapper[4611]: I0929 14:28:04.629033 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:28:05 crc kubenswrapper[4611]: I0929 14:28:05.460766 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4bc89" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="registry-server" containerID="cri-o://958efa7f39d05b4a638a6f4b1714959398db7a9c7cda3d66f8f1a9f125d2a6e9" gracePeriod=2 Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.478618 4611 generic.go:334] "Generic (PLEG): container finished" podID="dc428306-6103-45c9-9036-e41314027659" containerID="958efa7f39d05b4a638a6f4b1714959398db7a9c7cda3d66f8f1a9f125d2a6e9" exitCode=0 Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.478920 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerDied","Data":"958efa7f39d05b4a638a6f4b1714959398db7a9c7cda3d66f8f1a9f125d2a6e9"} Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.700357 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.754971 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-catalog-content\") pod \"dc428306-6103-45c9-9036-e41314027659\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.755321 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-utilities\") pod \"dc428306-6103-45c9-9036-e41314027659\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.755353 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5msml\" (UniqueName: \"kubernetes.io/projected/dc428306-6103-45c9-9036-e41314027659-kube-api-access-5msml\") pod \"dc428306-6103-45c9-9036-e41314027659\" (UID: \"dc428306-6103-45c9-9036-e41314027659\") " Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.755958 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-utilities" (OuterVolumeSpecName: "utilities") pod "dc428306-6103-45c9-9036-e41314027659" (UID: "dc428306-6103-45c9-9036-e41314027659"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.770061 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc428306-6103-45c9-9036-e41314027659-kube-api-access-5msml" (OuterVolumeSpecName: "kube-api-access-5msml") pod "dc428306-6103-45c9-9036-e41314027659" (UID: "dc428306-6103-45c9-9036-e41314027659"). InnerVolumeSpecName "kube-api-access-5msml". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.776846 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc428306-6103-45c9-9036-e41314027659" (UID: "dc428306-6103-45c9-9036-e41314027659"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.858280 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.858325 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5msml\" (UniqueName: \"kubernetes.io/projected/dc428306-6103-45c9-9036-e41314027659-kube-api-access-5msml\") on node \"crc\" DevicePath \"\"" Sep 29 14:28:06 crc kubenswrapper[4611]: I0929 14:28:06.858345 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc428306-6103-45c9-9036-e41314027659-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.489452 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc89" event={"ID":"dc428306-6103-45c9-9036-e41314027659","Type":"ContainerDied","Data":"161b90aa945173248b655af68f279bb57d9e96249b696bdd8e0324e62378c106"} Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.489792 4611 scope.go:117] "RemoveContainer" containerID="958efa7f39d05b4a638a6f4b1714959398db7a9c7cda3d66f8f1a9f125d2a6e9" Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.489488 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc89" Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.523490 4611 scope.go:117] "RemoveContainer" containerID="c243ebd97d325d2a378e4a5f02dc503252c177e86dc1c4b734952c504421a39c" Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.530561 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc89"] Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.541962 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc89"] Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.545282 4611 scope.go:117] "RemoveContainer" containerID="912e8ceab1cbf8008998acf74b1ffe8f1b46f9596854286c4b0bc0099c5168ca" Sep 29 14:28:07 crc kubenswrapper[4611]: I0929 14:28:07.748308 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc428306-6103-45c9-9036-e41314027659" path="/var/lib/kubelet/pods/dc428306-6103-45c9-9036-e41314027659/volumes" Sep 29 14:28:34 crc kubenswrapper[4611]: I0929 14:28:34.628680 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:28:34 crc kubenswrapper[4611]: I0929 14:28:34.629249 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:29:04 crc kubenswrapper[4611]: I0929 14:29:04.628120 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:29:04 crc kubenswrapper[4611]: I0929 14:29:04.628578 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:29:04 crc kubenswrapper[4611]: I0929 14:29:04.628643 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:29:04 crc kubenswrapper[4611]: I0929 14:29:04.630261 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:29:04 crc kubenswrapper[4611]: I0929 14:29:04.630331 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" gracePeriod=600 Sep 29 14:29:04 crc kubenswrapper[4611]: E0929 14:29:04.853217 4611 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fea0777_8bbe_4100_806a_2580c80c902c.slice/crio-55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7.scope\": RecentStats: unable to find data in memory cache]" Sep 29 14:29:04 crc kubenswrapper[4611]: E0929 14:29:04.872416 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:29:05 crc kubenswrapper[4611]: I0929 14:29:05.052562 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" exitCode=0 Sep 29 14:29:05 crc kubenswrapper[4611]: I0929 14:29:05.052619 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7"} Sep 29 14:29:05 crc kubenswrapper[4611]: I0929 14:29:05.052687 4611 scope.go:117] "RemoveContainer" containerID="e52f81ba8800d9cb3f8183fe3245e2cbe670ddde36b654f916731c15b981e641" Sep 29 14:29:05 crc kubenswrapper[4611]: I0929 14:29:05.053847 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:29:05 crc kubenswrapper[4611]: E0929 14:29:05.054133 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:29:17 crc kubenswrapper[4611]: I0929 14:29:17.736917 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:29:17 crc kubenswrapper[4611]: E0929 14:29:17.737740 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:29:29 crc kubenswrapper[4611]: I0929 14:29:29.736001 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:29:29 crc kubenswrapper[4611]: E0929 14:29:29.736865 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:29:39 crc kubenswrapper[4611]: I0929 14:29:39.859243 4611 scope.go:117] "RemoveContainer" containerID="e056621ae4d84e43c4a336ed0bc4f54fd94eee2ce75e46aa8e5f3dc8afb17b73" Sep 29 14:29:39 crc kubenswrapper[4611]: I0929 14:29:39.895273 4611 scope.go:117] "RemoveContainer" containerID="ed097833ae1c14308495c08fee84aa5e1118f84229103d8d8cfb2cf6ffc8a36a" Sep 29 14:29:39 crc kubenswrapper[4611]: I0929 14:29:39.916913 4611 scope.go:117] "RemoveContainer" containerID="54a43191b9fb7e7e612f668019c00a183ed972bc5f90879a276818d6d97cf101" Sep 29 14:29:41 crc kubenswrapper[4611]: I0929 14:29:41.737460 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:29:41 crc kubenswrapper[4611]: E0929 14:29:41.738318 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:29:52 crc kubenswrapper[4611]: I0929 14:29:52.737208 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:29:52 crc kubenswrapper[4611]: E0929 14:29:52.738244 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:30:00 crc kubenswrapper[4611]: 
I0929 14:30:00.191884 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69"] Sep 29 14:30:00 crc kubenswrapper[4611]: E0929 14:30:00.192915 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="extract-content" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.192934 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="extract-content" Sep 29 14:30:00 crc kubenswrapper[4611]: E0929 14:30:00.192968 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="extract-utilities" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.192976 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="extract-utilities" Sep 29 14:30:00 crc kubenswrapper[4611]: E0929 14:30:00.193020 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="registry-server" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.193029 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="registry-server" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.193268 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc428306-6103-45c9-9036-e41314027659" containerName="registry-server" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.194167 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.206322 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.206328 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.207518 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69"] Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.215434 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-config-volume\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.215495 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-secret-volume\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.215576 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62pnt\" (UniqueName: \"kubernetes.io/projected/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-kube-api-access-62pnt\") pod 
\"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.317099 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62pnt\" (UniqueName: \"kubernetes.io/projected/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-kube-api-access-62pnt\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.317555 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-config-volume\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.318114 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-secret-volume\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.318527 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-config-volume\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.324430 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-secret-volume\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.336719 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62pnt\" (UniqueName: \"kubernetes.io/projected/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-kube-api-access-62pnt\") pod \"collect-profiles-29319270-fsv69\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:00 crc kubenswrapper[4611]: I0929 14:30:00.519262 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:01 crc kubenswrapper[4611]: I0929 14:30:01.013122 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69"] Sep 29 14:30:01 crc kubenswrapper[4611]: I0929 14:30:01.604948 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" event={"ID":"525d1b6a-db4c-4ad5-bb82-bb920cf5da53","Type":"ContainerStarted","Data":"71fc4828b38d8e3666ef1a1ea5204e71ee7f7184c9a736c72822be088af9b153"} Sep 29 14:30:01 crc kubenswrapper[4611]: I0929 14:30:01.605202 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" event={"ID":"525d1b6a-db4c-4ad5-bb82-bb920cf5da53","Type":"ContainerStarted","Data":"1094f44c8f3dafb23b2a6af2dcb1991d29e9432ae0b885bc1cfc66c56a109d0c"} Sep 29 14:30:01 crc kubenswrapper[4611]: I0929 14:30:01.624691 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" podStartSLOduration=1.6246761140000001 podStartE2EDuration="1.624676114s" podCreationTimestamp="2025-09-29 14:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:30:01.620964027 +0000 UTC m=+6588.512483633" watchObservedRunningTime="2025-09-29 14:30:01.624676114 +0000 UTC m=+6588.516195720" Sep 29 14:30:02 crc kubenswrapper[4611]: I0929 14:30:02.614426 4611 generic.go:334] "Generic (PLEG): container finished" podID="525d1b6a-db4c-4ad5-bb82-bb920cf5da53" containerID="71fc4828b38d8e3666ef1a1ea5204e71ee7f7184c9a736c72822be088af9b153" exitCode=0 Sep 29 14:30:02 crc kubenswrapper[4611]: I0929 14:30:02.614468 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" event={"ID":"525d1b6a-db4c-4ad5-bb82-bb920cf5da53","Type":"ContainerDied","Data":"71fc4828b38d8e3666ef1a1ea5204e71ee7f7184c9a736c72822be088af9b153"} Sep 29 14:30:03 crc kubenswrapper[4611]: I0929 14:30:03.743814 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:30:03 crc kubenswrapper[4611]: E0929 14:30:03.745023 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:30:03 crc kubenswrapper[4611]: I0929 14:30:03.990699 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.106610 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62pnt\" (UniqueName: \"kubernetes.io/projected/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-kube-api-access-62pnt\") pod \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.107057 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-secret-volume\") pod \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.107092 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-config-volume\") pod \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\" (UID: \"525d1b6a-db4c-4ad5-bb82-bb920cf5da53\") " Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.107684 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-config-volume" (OuterVolumeSpecName: "config-volume") pod "525d1b6a-db4c-4ad5-bb82-bb920cf5da53" (UID: "525d1b6a-db4c-4ad5-bb82-bb920cf5da53"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.109015 4611 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.112434 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-kube-api-access-62pnt" (OuterVolumeSpecName: "kube-api-access-62pnt") pod "525d1b6a-db4c-4ad5-bb82-bb920cf5da53" (UID: "525d1b6a-db4c-4ad5-bb82-bb920cf5da53"). InnerVolumeSpecName "kube-api-access-62pnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.113353 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "525d1b6a-db4c-4ad5-bb82-bb920cf5da53" (UID: "525d1b6a-db4c-4ad5-bb82-bb920cf5da53"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.210834 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62pnt\" (UniqueName: \"kubernetes.io/projected/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-kube-api-access-62pnt\") on node \"crc\" DevicePath \"\"" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.210864 4611 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/525d1b6a-db4c-4ad5-bb82-bb920cf5da53-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.636839 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" event={"ID":"525d1b6a-db4c-4ad5-bb82-bb920cf5da53","Type":"ContainerDied","Data":"1094f44c8f3dafb23b2a6af2dcb1991d29e9432ae0b885bc1cfc66c56a109d0c"} Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.637087 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1094f44c8f3dafb23b2a6af2dcb1991d29e9432ae0b885bc1cfc66c56a109d0c" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.637225 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-fsv69" Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.711973 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"] Sep 29 14:30:04 crc kubenswrapper[4611]: I0929 14:30:04.720276 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-hs9v2"] Sep 29 14:30:05 crc kubenswrapper[4611]: I0929 14:30:05.748542 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="517a165f-f5fb-4788-b61b-b2ad505703ab" path="/var/lib/kubelet/pods/517a165f-f5fb-4788-b61b-b2ad505703ab/volumes" Sep 29 14:30:16 crc kubenswrapper[4611]: I0929 14:30:16.736947 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:30:16 crc kubenswrapper[4611]: E0929 14:30:16.737761 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:30:27 crc kubenswrapper[4611]: I0929 14:30:27.737408 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:30:27 crc kubenswrapper[4611]: E0929 14:30:27.738356 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:30:40 crc kubenswrapper[4611]: I0929 14:30:40.032267 4611 scope.go:117] "RemoveContainer" containerID="2fa45b9c54881dd2288a57498a227fbddf5dd54ba5e38e1f4895e0ca51908044" Sep 29 14:30:42 
crc kubenswrapper[4611]: I0929 14:30:42.736499 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:30:42 crc kubenswrapper[4611]: E0929 14:30:42.737174 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:30:57 crc kubenswrapper[4611]: I0929 14:30:57.738264 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:30:57 crc kubenswrapper[4611]: E0929 14:30:57.739390 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:31:09 crc kubenswrapper[4611]: I0929 14:31:09.738551 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:31:09 crc kubenswrapper[4611]: E0929 14:31:09.742159 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:31:21 crc kubenswrapper[4611]: I0929 14:31:21.425125 4611 generic.go:334] "Generic (PLEG): container finished" podID="2d140fa2-fe3d-4e16-810f-c9b568c4554c" containerID="6614575c2c06c72c2240e272f83578a70ae0d822327ebe3564783586c69d9989" exitCode=1 Sep 29 14:31:21 crc kubenswrapper[4611]: I0929 14:31:21.425221 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2d140fa2-fe3d-4e16-810f-c9b568c4554c","Type":"ContainerDied","Data":"6614575c2c06c72c2240e272f83578a70ae0d822327ebe3564783586c69d9989"} Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.736207 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:31:23 crc kubenswrapper[4611]: E0929 14:31:22.737113 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.817494 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894359 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ca-certs\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894402 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-config-data\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894424 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-temporary\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894458 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ssh-key\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894488 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdtlz\" (UniqueName: \"kubernetes.io/projected/2d140fa2-fe3d-4e16-810f-c9b568c4554c-kube-api-access-fdtlz\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894539 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894584 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config-secret\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894713 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.894804 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-workdir\") pod \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\" (UID: \"2d140fa2-fe3d-4e16-810f-c9b568c4554c\") " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.895180 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.895423 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-config-data" (OuterVolumeSpecName: "config-data") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.896926 4611 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.900709 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.905810 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "test-operator-logs") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.920174 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d140fa2-fe3d-4e16-810f-c9b568c4554c-kube-api-access-fdtlz" (OuterVolumeSpecName: "kube-api-access-fdtlz") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "kube-api-access-fdtlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.941804 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.941825 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.944655 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.958851 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2d140fa2-fe3d-4e16-810f-c9b568c4554c" (UID: "2d140fa2-fe3d-4e16-810f-c9b568c4554c"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:22.999981 4611 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000015 4611 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000029 4611 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000040 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdtlz\" (UniqueName: \"kubernetes.io/projected/2d140fa2-fe3d-4e16-810f-c9b568c4554c-kube-api-access-fdtlz\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000106 4611 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000119 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000151 4611 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d140fa2-fe3d-4e16-810f-c9b568c4554c-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.000166 4611 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2d140fa2-fe3d-4e16-810f-c9b568c4554c-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.029036 4611 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.101824 4611 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.463929 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2d140fa2-fe3d-4e16-810f-c9b568c4554c","Type":"ContainerDied","Data":"c565ad2a16f30253fde843ada4e07a46613ae8786818f16462a1876e21e4d558"} Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.464026 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c565ad2a16f30253fde843ada4e07a46613ae8786818f16462a1876e21e4d558" Sep 29 14:31:23 crc kubenswrapper[4611]: I0929 14:31:23.463992 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.807253 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 14:31:28 crc kubenswrapper[4611]: E0929 14:31:28.809041 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="525d1b6a-db4c-4ad5-bb82-bb920cf5da53" containerName="collect-profiles" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.809413 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="525d1b6a-db4c-4ad5-bb82-bb920cf5da53" containerName="collect-profiles" Sep 29 14:31:28 crc kubenswrapper[4611]: E0929 14:31:28.809499 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d140fa2-fe3d-4e16-810f-c9b568c4554c" containerName="tempest-tests-tempest-tests-runner" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.809578 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d140fa2-fe3d-4e16-810f-c9b568c4554c" containerName="tempest-tests-tempest-tests-runner" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.809872 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="525d1b6a-db4c-4ad5-bb82-bb920cf5da53" containerName="collect-profiles" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.809987 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d140fa2-fe3d-4e16-810f-c9b568c4554c" containerName="tempest-tests-tempest-tests-runner" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.810775 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.814615 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.817365 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-92hc4" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.921999 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl98z\" (UniqueName: \"kubernetes.io/projected/9059c1f4-16be-468b-82ac-58311a3d727d-kube-api-access-hl98z\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:28 crc kubenswrapper[4611]: I0929 14:31:28.922368 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc kubenswrapper[4611]: I0929 14:31:29.025046 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl98z\" (UniqueName: \"kubernetes.io/projected/9059c1f4-16be-468b-82ac-58311a3d727d-kube-api-access-hl98z\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc kubenswrapper[4611]: I0929 14:31:29.025164 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc kubenswrapper[4611]: I0929 14:31:29.027299 4611 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc kubenswrapper[4611]: I0929 14:31:29.043343 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl98z\" (UniqueName: \"kubernetes.io/projected/9059c1f4-16be-468b-82ac-58311a3d727d-kube-api-access-hl98z\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc kubenswrapper[4611]: I0929 14:31:29.065260 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9059c1f4-16be-468b-82ac-58311a3d727d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc 
kubenswrapper[4611]: I0929 14:31:29.133460 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 14:31:29 crc kubenswrapper[4611]: I0929 14:31:29.606915 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 14:31:30 crc kubenswrapper[4611]: I0929 14:31:30.553206 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9059c1f4-16be-468b-82ac-58311a3d727d","Type":"ContainerStarted","Data":"115593da13e4759d51f2ba50c764f8dda221f27b3f047be16d8aad3aa8b82f81"} Sep 29 14:31:31 crc kubenswrapper[4611]: I0929 14:31:31.565722 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9059c1f4-16be-468b-82ac-58311a3d727d","Type":"ContainerStarted","Data":"373f134aa1e9c5d806680a17bdee8b10c06f11d2e6ce0cc5cf9535bbe1a1c197"} Sep 29 14:31:31 crc kubenswrapper[4611]: I0929 14:31:31.591722 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.437030282 podStartE2EDuration="3.591693462s" podCreationTimestamp="2025-09-29 14:31:28 +0000 UTC" firstStartedPulling="2025-09-29 14:31:29.61188111 +0000 UTC m=+6676.503400726" lastFinishedPulling="2025-09-29 14:31:30.7665443 +0000 UTC m=+6677.658063906" observedRunningTime="2025-09-29 14:31:31.578999066 +0000 UTC m=+6678.470518682" watchObservedRunningTime="2025-09-29 14:31:31.591693462 +0000 UTC m=+6678.483213068" Sep 29 14:31:37 crc kubenswrapper[4611]: I0929 14:31:37.737558 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:31:37 crc kubenswrapper[4611]: E0929 14:31:37.738322 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:31:51 crc kubenswrapper[4611]: I0929 14:31:51.737375 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:31:51 crc kubenswrapper[4611]: E0929 14:31:51.738719 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.462868 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mkzpb/must-gather-jqlcx"] Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.473350 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.478925 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mkzpb"/"kube-root-ca.crt" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.479050 4611 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mkzpb"/"openshift-service-ca.crt" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.480654 4611 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-mkzpb"/"default-dockercfg-cfrl4" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.494735 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mkzpb/must-gather-jqlcx"] Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.546298 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7rh9\" (UniqueName: \"kubernetes.io/projected/7407eea2-def7-4a82-a48d-6043733a9faa-kube-api-access-z7rh9\") pod \"must-gather-jqlcx\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.546436 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7407eea2-def7-4a82-a48d-6043733a9faa-must-gather-output\") pod \"must-gather-jqlcx\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.648210 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7407eea2-def7-4a82-a48d-6043733a9faa-must-gather-output\") pod \"must-gather-jqlcx\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.648561 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7rh9\" (UniqueName: \"kubernetes.io/projected/7407eea2-def7-4a82-a48d-6043733a9faa-kube-api-access-z7rh9\") pod \"must-gather-jqlcx\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.648793 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7407eea2-def7-4a82-a48d-6043733a9faa-must-gather-output\") pod \"must-gather-jqlcx\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.667894 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7rh9\" (UniqueName: \"kubernetes.io/projected/7407eea2-def7-4a82-a48d-6043733a9faa-kube-api-access-z7rh9\") pod \"must-gather-jqlcx\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:54 crc kubenswrapper[4611]: I0929 14:31:54.858065 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:31:55 crc kubenswrapper[4611]: I0929 14:31:55.325826 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mkzpb/must-gather-jqlcx"] Sep 29 14:31:55 crc kubenswrapper[4611]: I0929 14:31:55.829522 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" event={"ID":"7407eea2-def7-4a82-a48d-6043733a9faa","Type":"ContainerStarted","Data":"2089bc904dde0124b55ed6d39c76cb8c401659afe82d0013f95c6d8754a48701"} Sep 29 14:32:04 crc kubenswrapper[4611]: I0929 14:32:04.736575 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:32:04 crc kubenswrapper[4611]: E0929 14:32:04.737473 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:32:04 crc kubenswrapper[4611]: I0929 14:32:04.962280 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" event={"ID":"7407eea2-def7-4a82-a48d-6043733a9faa","Type":"ContainerStarted","Data":"b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7"} Sep 29 14:32:04 crc kubenswrapper[4611]: I0929 14:32:04.962320 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" event={"ID":"7407eea2-def7-4a82-a48d-6043733a9faa","Type":"ContainerStarted","Data":"24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a"} Sep 29 14:32:04 crc kubenswrapper[4611]: I0929 14:32:04.982139 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" podStartSLOduration=2.578177348 podStartE2EDuration="10.982116327s" podCreationTimestamp="2025-09-29 14:31:54 +0000 UTC" firstStartedPulling="2025-09-29 14:31:55.338793747 +0000 UTC m=+6702.230313373" lastFinishedPulling="2025-09-29 14:32:03.742732746 +0000 UTC m=+6710.634252352" observedRunningTime="2025-09-29 14:32:04.982033395 +0000 UTC m=+6711.873552991" watchObservedRunningTime="2025-09-29 14:32:04.982116327 +0000 UTC m=+6711.873635953" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.303996 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-dhkjv"] Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.305861 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.417963 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brnkh\" (UniqueName: \"kubernetes.io/projected/3a4556b8-5cf4-46d1-816c-15095ce695a4-kube-api-access-brnkh\") pod \"crc-debug-dhkjv\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") " pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.418242 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a4556b8-5cf4-46d1-816c-15095ce695a4-host\") pod \"crc-debug-dhkjv\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") " pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.520126 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brnkh\" (UniqueName: \"kubernetes.io/projected/3a4556b8-5cf4-46d1-816c-15095ce695a4-kube-api-access-brnkh\") pod \"crc-debug-dhkjv\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") " pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.520216 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a4556b8-5cf4-46d1-816c-15095ce695a4-host\") pod \"crc-debug-dhkjv\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") " pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.521230 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a4556b8-5cf4-46d1-816c-15095ce695a4-host\") pod \"crc-debug-dhkjv\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") " pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.552282 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brnkh\" (UniqueName: \"kubernetes.io/projected/3a4556b8-5cf4-46d1-816c-15095ce695a4-kube-api-access-brnkh\") pod \"crc-debug-dhkjv\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") " pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:10 crc kubenswrapper[4611]: I0929 14:32:10.623202 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" Sep 29 14:32:11 crc kubenswrapper[4611]: I0929 14:32:11.017592 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" event={"ID":"3a4556b8-5cf4-46d1-816c-15095ce695a4","Type":"ContainerStarted","Data":"260f85ab193b29917b9f1468d559988446a878825754a8d6ae5018b884e35eca"} Sep 29 14:32:19 crc kubenswrapper[4611]: I0929 14:32:19.739463 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:32:19 crc kubenswrapper[4611]: E0929 14:32:19.740136 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:32:24 crc kubenswrapper[4611]: I0929 14:32:24.177044 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" event={"ID":"3a4556b8-5cf4-46d1-816c-15095ce695a4","Type":"ContainerStarted","Data":"1b7ba23579c77faa8929fb0686fddce9763028cf27c82582676f3683067e4306"} Sep 29 14:32:32 crc kubenswrapper[4611]: I0929 14:32:32.736228 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:32:32 crc kubenswrapper[4611]: E0929 14:32:32.737107 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:32:45 crc kubenswrapper[4611]: I0929 14:32:45.736996 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:32:45 crc kubenswrapper[4611]: E0929 14:32:45.737919 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.039143 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" podStartSLOduration=26.830655145 podStartE2EDuration="40.039126329s" podCreationTimestamp="2025-09-29 14:32:10 +0000 UTC" firstStartedPulling="2025-09-29 14:32:10.674534244 +0000 UTC m=+6717.566053850" lastFinishedPulling="2025-09-29 14:32:23.883005428 +0000 UTC m=+6730.774525034" observedRunningTime="2025-09-29 14:32:24.1922051 +0000 UTC m=+6731.083724726" watchObservedRunningTime="2025-09-29 14:32:50.039126329 +0000 UTC m=+6756.930645935" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.044933 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jbp9w"] Sep 29 14:32:50 crc 
kubenswrapper[4611]: I0929 14:32:50.047509 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.231409 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6zsp\" (UniqueName: \"kubernetes.io/projected/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-kube-api-access-n6zsp\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.231924 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-catalog-content\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.232135 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-utilities\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.262067 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jbp9w"] Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.334339 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-utilities\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.334456 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6zsp\" (UniqueName: \"kubernetes.io/projected/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-kube-api-access-n6zsp\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.334605 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-catalog-content\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.335237 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-utilities\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.335245 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-catalog-content\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 
29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.364318 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6zsp\" (UniqueName: \"kubernetes.io/projected/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-kube-api-access-n6zsp\") pod \"certified-operators-jbp9w\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:50 crc kubenswrapper[4611]: I0929 14:32:50.405661 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:32:51 crc kubenswrapper[4611]: W0929 14:32:51.600133 4611 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d2f7c92_dd5b_4d0e_ac56_db2945ef6b2d.slice/crio-c57383beadafb40239cc46edec3b475fa7b54c5fbfd7886e4a3a4b663585e3d9 WatchSource:0}: Error finding container c57383beadafb40239cc46edec3b475fa7b54c5fbfd7886e4a3a4b663585e3d9: Status 404 returned error can't find the container with id c57383beadafb40239cc46edec3b475fa7b54c5fbfd7886e4a3a4b663585e3d9 Sep 29 14:32:51 crc kubenswrapper[4611]: I0929 14:32:51.602523 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jbp9w"] Sep 29 14:32:52 crc kubenswrapper[4611]: I0929 14:32:52.504943 4611 generic.go:334] "Generic (PLEG): container finished" podID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerID="835b43f22c624940b8be8283c05e81ae271ef0168cbf6daf48007cf1bf4bd168" exitCode=0 Sep 29 14:32:52 crc kubenswrapper[4611]: I0929 14:32:52.505057 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerDied","Data":"835b43f22c624940b8be8283c05e81ae271ef0168cbf6daf48007cf1bf4bd168"} Sep 29 14:32:52 crc kubenswrapper[4611]: I0929 14:32:52.505222 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerStarted","Data":"c57383beadafb40239cc46edec3b475fa7b54c5fbfd7886e4a3a4b663585e3d9"} Sep 29 14:32:54 crc kubenswrapper[4611]: I0929 14:32:54.549059 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerStarted","Data":"114894487a9e97ff5298ec49ea2cc6a234d7051bccd78e40ef4754b5f4f2f3f3"} Sep 29 14:32:56 crc kubenswrapper[4611]: I0929 14:32:56.736959 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:32:56 crc kubenswrapper[4611]: E0929 14:32:56.737518 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:32:57 crc kubenswrapper[4611]: I0929 14:32:57.577727 4611 generic.go:334] "Generic (PLEG): container finished" podID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerID="114894487a9e97ff5298ec49ea2cc6a234d7051bccd78e40ef4754b5f4f2f3f3" exitCode=0 Sep 29 14:32:57 crc kubenswrapper[4611]: I0929 14:32:57.577814 4611 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerDied","Data":"114894487a9e97ff5298ec49ea2cc6a234d7051bccd78e40ef4754b5f4f2f3f3"} Sep 29 14:32:57 crc kubenswrapper[4611]: I0929 14:32:57.581802 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:32:58 crc kubenswrapper[4611]: I0929 14:32:58.624292 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerStarted","Data":"8888fba76d8b35efff2a6cf7760b8d125949de90a0ae9ec6db898656f8b63a78"} Sep 29 14:32:58 crc kubenswrapper[4611]: I0929 14:32:58.648284 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jbp9w" podStartSLOduration=2.871763732 podStartE2EDuration="8.648265162s" podCreationTimestamp="2025-09-29 14:32:50 +0000 UTC" firstStartedPulling="2025-09-29 14:32:52.507170423 +0000 UTC m=+6759.398690029" lastFinishedPulling="2025-09-29 14:32:58.283671853 +0000 UTC m=+6765.175191459" observedRunningTime="2025-09-29 14:32:58.643931927 +0000 UTC m=+6765.535451523" watchObservedRunningTime="2025-09-29 14:32:58.648265162 +0000 UTC m=+6765.539784768" Sep 29 14:33:00 crc kubenswrapper[4611]: I0929 14:33:00.406144 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:33:00 crc kubenswrapper[4611]: I0929 14:33:00.406771 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:33:01 crc kubenswrapper[4611]: I0929 14:33:01.602719 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jbp9w" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="registry-server" probeResult="failure" output=< Sep 29 14:33:01 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:33:01 crc kubenswrapper[4611]: > Sep 29 14:33:08 crc kubenswrapper[4611]: I0929 14:33:08.735965 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:33:08 crc kubenswrapper[4611]: E0929 14:33:08.736993 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:33:11 crc kubenswrapper[4611]: I0929 14:33:11.463509 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jbp9w" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="registry-server" probeResult="failure" output=< Sep 29 14:33:11 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:33:11 crc kubenswrapper[4611]: > Sep 29 14:33:20 crc kubenswrapper[4611]: I0929 14:33:20.477919 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:33:20 crc kubenswrapper[4611]: I0929 14:33:20.541711 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:33:21 crc kubenswrapper[4611]: I0929 14:33:21.253956 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jbp9w"] Sep 29 14:33:21 crc kubenswrapper[4611]: I0929 14:33:21.842178 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jbp9w" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="registry-server" containerID="cri-o://8888fba76d8b35efff2a6cf7760b8d125949de90a0ae9ec6db898656f8b63a78" gracePeriod=2 Sep 29 14:33:22 crc kubenswrapper[4611]: I0929 14:33:22.738091 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:33:22 crc kubenswrapper[4611]: E0929 14:33:22.738891 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:33:22 crc kubenswrapper[4611]: I0929 14:33:22.858105 4611 generic.go:334] "Generic (PLEG): container finished" podID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerID="8888fba76d8b35efff2a6cf7760b8d125949de90a0ae9ec6db898656f8b63a78" exitCode=0 Sep 29 14:33:22 crc kubenswrapper[4611]: I0929 14:33:22.858154 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerDied","Data":"8888fba76d8b35efff2a6cf7760b8d125949de90a0ae9ec6db898656f8b63a78"} Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.008895 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.123177 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-utilities\") pod \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.123301 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6zsp\" (UniqueName: \"kubernetes.io/projected/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-kube-api-access-n6zsp\") pod \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.123380 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-catalog-content\") pod \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\" (UID: \"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d\") " Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.132022 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-utilities" (OuterVolumeSpecName: "utilities") pod "2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" (UID: "2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.153170 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-kube-api-access-n6zsp" (OuterVolumeSpecName: "kube-api-access-n6zsp") pod "2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" (UID: "2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d"). InnerVolumeSpecName "kube-api-access-n6zsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.190386 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" (UID: "2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.226757 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6zsp\" (UniqueName: \"kubernetes.io/projected/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-kube-api-access-n6zsp\") on node \"crc\" DevicePath \"\"" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.226955 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.227037 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.868730 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbp9w" event={"ID":"2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d","Type":"ContainerDied","Data":"c57383beadafb40239cc46edec3b475fa7b54c5fbfd7886e4a3a4b663585e3d9"} Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.868776 4611 scope.go:117] "RemoveContainer" containerID="8888fba76d8b35efff2a6cf7760b8d125949de90a0ae9ec6db898656f8b63a78" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.868893 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jbp9w" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.895215 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jbp9w"] Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.902361 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jbp9w"] Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.933200 4611 scope.go:117] "RemoveContainer" containerID="114894487a9e97ff5298ec49ea2cc6a234d7051bccd78e40ef4754b5f4f2f3f3" Sep 29 14:33:23 crc kubenswrapper[4611]: I0929 14:33:23.964669 4611 scope.go:117] "RemoveContainer" containerID="835b43f22c624940b8be8283c05e81ae271ef0168cbf6daf48007cf1bf4bd168" Sep 29 14:33:25 crc kubenswrapper[4611]: I0929 14:33:25.750100 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" path="/var/lib/kubelet/pods/2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d/volumes" Sep 29 14:33:33 crc kubenswrapper[4611]: I0929 14:33:33.736927 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:33:33 crc kubenswrapper[4611]: E0929 14:33:33.737823 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:33:46 crc kubenswrapper[4611]: I0929 14:33:46.737752 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:33:46 crc kubenswrapper[4611]: E0929 14:33:46.741571 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:33:49 crc kubenswrapper[4611]: I0929 14:33:49.607598 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-646b84b57b-d2bbm_851aa640-9ae4-4045-a9ae-94ba85cc06da/barbican-api-log/0.log" Sep 29 14:33:49 crc kubenswrapper[4611]: I0929 14:33:49.613216 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-646b84b57b-d2bbm_851aa640-9ae4-4045-a9ae-94ba85cc06da/barbican-api/0.log" Sep 29 14:33:49 crc kubenswrapper[4611]: I0929 14:33:49.873841 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7447bc4958-7qrwm_7675325b-f3c9-47e9-9992-bdc23d3a761f/barbican-keystone-listener/0.log" Sep 29 14:33:49 crc kubenswrapper[4611]: I0929 14:33:49.988602 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7447bc4958-7qrwm_7675325b-f3c9-47e9-9992-bdc23d3a761f/barbican-keystone-listener-log/0.log" Sep 29 14:33:50 crc kubenswrapper[4611]: I0929 14:33:50.264739 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-fbf8688df-k8w9k_bd065838-835d-4f4d-aa6f-27cde218b024/barbican-worker/0.log" Sep 29 14:33:50 crc kubenswrapper[4611]: I0929 14:33:50.267646 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-fbf8688df-k8w9k_bd065838-835d-4f4d-aa6f-27cde218b024/barbican-worker-log/0.log" Sep 29 14:33:50 crc kubenswrapper[4611]: I0929 14:33:50.511696 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-gmbg6_482ab873-2d1f-421c-b3b7-ec74175ad046/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:50 crc kubenswrapper[4611]: I0929 14:33:50.782066 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_335e7bb5-075d-48d3-9fa4-3570660b0b28/ceilometer-central-agent/0.log" Sep 29 14:33:50 crc kubenswrapper[4611]: I0929 14:33:50.884368 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_335e7bb5-075d-48d3-9fa4-3570660b0b28/ceilometer-notification-agent/0.log" Sep 29 14:33:50 crc kubenswrapper[4611]: I0929 14:33:50.911098 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_335e7bb5-075d-48d3-9fa4-3570660b0b28/proxy-httpd/0.log" Sep 29 14:33:51 crc kubenswrapper[4611]: I0929 14:33:51.068459 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_335e7bb5-075d-48d3-9fa4-3570660b0b28/sg-core/0.log" Sep 29 14:33:51 crc kubenswrapper[4611]: I0929 14:33:51.261825 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c16aac44-b8d5-46c2-b439-9cda8aed610d/cinder-api/0.log" Sep 29 14:33:51 crc kubenswrapper[4611]: I0929 14:33:51.367023 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c16aac44-b8d5-46c2-b439-9cda8aed610d/cinder-api-log/0.log" Sep 29 14:33:51 crc kubenswrapper[4611]: I0929 14:33:51.527494 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_998e498c-f720-44eb-9a17-7c13a2dd5b70/cinder-scheduler/0.log" Sep 29 14:33:51 crc kubenswrapper[4611]: I0929 14:33:51.714858 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_998e498c-f720-44eb-9a17-7c13a2dd5b70/probe/0.log" Sep 29 14:33:51 crc kubenswrapper[4611]: I0929 14:33:51.786823 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-f9cvj_6bc80e04-ce3c-485c-b9a0-138366726186/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:52 crc kubenswrapper[4611]: I0929 14:33:52.044911 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-hbwvh_2c0b90a0-54ca-47c2-a45e-6ce50bd04061/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:52 crc kubenswrapper[4611]: I0929 14:33:52.277997 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6c7c498869-wtqdv_c4579287-c56b-417d-b05f-ee78f9aea474/init/0.log" Sep 29 14:33:52 crc kubenswrapper[4611]: I0929 14:33:52.490390 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6c7c498869-wtqdv_c4579287-c56b-417d-b05f-ee78f9aea474/init/0.log" Sep 29 14:33:52 crc kubenswrapper[4611]: I0929 14:33:52.737269 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6c7c498869-wtqdv_c4579287-c56b-417d-b05f-ee78f9aea474/dnsmasq-dns/0.log" 
Sep 29 14:33:52 crc kubenswrapper[4611]: I0929 14:33:52.837147 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-x8n6f_9ed7d11c-f153-4632-bef8-b39a6bed2966/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:52 crc kubenswrapper[4611]: I0929 14:33:52.960509 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_8c295c03-859a-41dd-acb6-1d7f13cc0877/glance-httpd/0.log" Sep 29 14:33:53 crc kubenswrapper[4611]: I0929 14:33:53.074988 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_8c295c03-859a-41dd-acb6-1d7f13cc0877/glance-log/0.log" Sep 29 14:33:53 crc kubenswrapper[4611]: I0929 14:33:53.170063 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6139f1f7-d7fe-403b-b414-989be3576095/glance-httpd/0.log" Sep 29 14:33:53 crc kubenswrapper[4611]: I0929 14:33:53.255191 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6139f1f7-d7fe-403b-b414-989be3576095/glance-log/0.log" Sep 29 14:33:53 crc kubenswrapper[4611]: I0929 14:33:53.531689 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7b7c6854c4-jrwd9_c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a/horizon/1.log" Sep 29 14:33:53 crc kubenswrapper[4611]: I0929 14:33:53.565383 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7b7c6854c4-jrwd9_c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a/horizon/0.log" Sep 29 14:33:53 crc kubenswrapper[4611]: I0929 14:33:53.852521 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-8jw6v_3e06e443-7ad2-4078-9023-3605912748c6/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.010160 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7b7c6854c4-jrwd9_c0b68cce-e4c2-4291-bbc5-0d096d1e0b8a/horizon-log/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.089427 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-gmx5f_11cbf559-05e0-4671-b794-f2325cb752a9/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.365451 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319181-j5cxx_aab1b06f-3b44-4f36-91cb-833959f0c9f1/keystone-cron/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.565688 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-84bd74d746-h92xg_81599dc0-616b-43ad-91ef-c033d30f1892/keystone-api/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.645803 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319241-dcp5m_be7463cf-87c5-4053-94a8-162e3c310e92/keystone-cron/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.785938 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_8a21947f-1b2a-4360-824a-b05cb82ed84d/kube-state-metrics/0.log" Sep 29 14:33:54 crc kubenswrapper[4611]: I0929 14:33:54.902801 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-rhthx_678b329e-0ba8-4901-94e3-51738d9317c0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:55 crc kubenswrapper[4611]: I0929 14:33:55.558412 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-697f494f47-d2wfq_d27ae389-a34b-48a0-b349-8ff7e3268e40/neutron-api/0.log" Sep 29 14:33:55 crc kubenswrapper[4611]: I0929 14:33:55.632891 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-697f494f47-d2wfq_d27ae389-a34b-48a0-b349-8ff7e3268e40/neutron-httpd/0.log" Sep 29 14:33:55 crc kubenswrapper[4611]: I0929 14:33:55.759185 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-n582h_42a9b4b7-e479-4c75-9713-d80f50ff45d8/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:56 crc kubenswrapper[4611]: I0929 14:33:56.815706 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ed4e5355-c0d7-4a82-ad50-07e42cd38045/nova-cell0-conductor-conductor/0.log" Sep 29 14:33:57 crc kubenswrapper[4611]: I0929 14:33:57.480906 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_c05ef82a-7994-4d5a-b8e3-c04701bea7fe/nova-cell1-conductor-conductor/0.log" Sep 29 14:33:57 crc kubenswrapper[4611]: I0929 14:33:57.933338 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a9c6d291-30d4-44d5-a1ec-877c30fc954f/nova-api-log/0.log" Sep 29 14:33:58 crc kubenswrapper[4611]: I0929 14:33:58.246132 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a9c6d291-30d4-44d5-a1ec-877c30fc954f/nova-api-api/0.log" Sep 29 14:33:58 crc kubenswrapper[4611]: I0929 14:33:58.252054 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_b472baaa-35dd-4c0a-be69-991eb287a0f3/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 14:33:58 crc kubenswrapper[4611]: I0929 14:33:58.657586 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-hggrd_f9d9958c-2b92-4742-b36c-eaef389b07c5/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:33:58 crc kubenswrapper[4611]: I0929 14:33:58.697226 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_38cd3c38-4553-4c07-8627-615a255435d2/nova-metadata-log/0.log" Sep 29 14:33:58 crc kubenswrapper[4611]: I0929 14:33:58.736384 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:33:58 crc kubenswrapper[4611]: E0929 14:33:58.736782 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:33:59 crc kubenswrapper[4611]: I0929 14:33:59.357115 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_9ce84f09-60a3-4214-b3cb-85aca6574a83/nova-scheduler-scheduler/0.log" Sep 29 14:33:59 crc kubenswrapper[4611]: I0929 14:33:59.688358 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_46d0113e-4eb9-4b51-981e-744b6dd0842e/mysql-bootstrap/0.log" Sep 29 14:33:59 crc kubenswrapper[4611]: I0929 14:33:59.926844 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_46d0113e-4eb9-4b51-981e-744b6dd0842e/mysql-bootstrap/0.log" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.025810 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_46d0113e-4eb9-4b51-981e-744b6dd0842e/galera/0.log" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.325800 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_367a7739-cd0c-4a45-b804-1d763d6a55f4/mysql-bootstrap/0.log" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.510477 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_367a7739-cd0c-4a45-b804-1d763d6a55f4/mysql-bootstrap/0.log" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.687834 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vjtvg"] Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.695778 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_367a7739-cd0c-4a45-b804-1d763d6a55f4/galera/0.log" Sep 29 14:34:00 crc kubenswrapper[4611]: E0929 14:34:00.696689 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="registry-server" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.696725 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="registry-server" Sep 29 14:34:00 crc kubenswrapper[4611]: E0929 14:34:00.696770 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="extract-utilities" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.696780 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="extract-utilities" Sep 29 14:34:00 crc kubenswrapper[4611]: E0929 14:34:00.696789 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="extract-content" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.696799 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="extract-content" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.697464 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d2f7c92-dd5b-4d0e-ac56-db2945ef6b2d" containerName="registry-server" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.709813 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.789421 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j8hr\" (UniqueName: \"kubernetes.io/projected/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-kube-api-access-2j8hr\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.789500 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-catalog-content\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.789596 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-utilities\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.855508 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vjtvg"] Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.891020 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j8hr\" (UniqueName: \"kubernetes.io/projected/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-kube-api-access-2j8hr\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.891128 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-catalog-content\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.891279 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-utilities\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.892245 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-catalog-content\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.896822 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-utilities\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:00 crc kubenswrapper[4611]: I0929 14:34:00.996139 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2j8hr\" (UniqueName: \"kubernetes.io/projected/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-kube-api-access-2j8hr\") pod \"redhat-operators-vjtvg\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:01 crc kubenswrapper[4611]: I0929 14:34:01.055567 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:01 crc kubenswrapper[4611]: I0929 14:34:01.477314 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_f5bf612f-9341-4ddc-8525-55976ff9bedc/openstackclient/0.log" Sep 29 14:34:01 crc kubenswrapper[4611]: I0929 14:34:01.752718 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-9jsdt_b42caf5f-0509-41a3-ab3c-49b5b2be817e/ovn-controller/0.log" Sep 29 14:34:01 crc kubenswrapper[4611]: I0929 14:34:01.951353 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vjtvg"] Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.036551 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qrlpz_47b31f8c-66df-4172-8185-abba6357fc20/ovsdb-server-init/0.log" Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.311941 4611 generic.go:334] "Generic (PLEG): container finished" podID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerID="f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf" exitCode=0 Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.312170 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerDied","Data":"f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf"} Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.312196 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerStarted","Data":"7652020c43b12ca6a171922677adb9e8495974935b3b7d2e3eb0f1cb5a36f287"} Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.408109 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qrlpz_47b31f8c-66df-4172-8185-abba6357fc20/ovsdb-server-init/0.log" Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.435039 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_38cd3c38-4553-4c07-8627-615a255435d2/nova-metadata-metadata/0.log" Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.693512 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qrlpz_47b31f8c-66df-4172-8185-abba6357fc20/ovsdb-server/0.log" Sep 29 14:34:02 crc kubenswrapper[4611]: I0929 14:34:02.778114 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qrlpz_47b31f8c-66df-4172-8185-abba6357fc20/ovs-vswitchd/0.log" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.013046 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4bfcb"] Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.026216 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.049514 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4bfcb"] Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.076136 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-prd7l_a58bbd72-3a85-4f21-9d48-d7bfcc527310/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.107692 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-6dcc6c48fd-xwrxv_3a3e35e4-6b43-415c-871d-ab6903b9d24a/ovn-northd/0.log" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.181741 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-catalog-content\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.182097 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn4m2\" (UniqueName: \"kubernetes.io/projected/d630aa78-d4bc-42e0-81ae-312da9f6511c-kube-api-access-rn4m2\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.182127 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-utilities\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.307428 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-catalog-content\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.307547 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn4m2\" (UniqueName: \"kubernetes.io/projected/d630aa78-d4bc-42e0-81ae-312da9f6511c-kube-api-access-rn4m2\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.307586 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-utilities\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.309547 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-utilities\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" 
Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.310791 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-catalog-content\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.350579 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn4m2\" (UniqueName: \"kubernetes.io/projected/d630aa78-d4bc-42e0-81ae-312da9f6511c-kube-api-access-rn4m2\") pod \"community-operators-4bfcb\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.375093 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.701489 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9d360e26-9efd-4619-a0fc-77ac5eada7d0/ovsdbserver-nb/0.log" Sep 29 14:34:03 crc kubenswrapper[4611]: I0929 14:34:03.909317 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6e73133c-5010-47af-a2e8-df18d77a3f42/ovsdbserver-sb/0.log" Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.088528 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4bfcb"] Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.398869 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerStarted","Data":"18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc"} Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.404610 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerStarted","Data":"eede9e0669520239e685cf0c04a328d3f51d54508aa5efbd99b186342d25e969"} Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.404716 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerStarted","Data":"c0c717aeb520585e4dfed4fded4aa682ca203598ac3851731d4db970dc09bef8"} Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.524112 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-f475d4f88-9gwgs_6fd6bde4-17d7-4854-9238-4492968338d8/placement-api/0.log" Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.652493 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-f475d4f88-9gwgs_6fd6bde4-17d7-4854-9238-4492968338d8/placement-log/0.log" Sep 29 14:34:04 crc kubenswrapper[4611]: I0929 14:34:04.804956 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f68a498f-2ca9-4462-a6dd-e77c69312c95/init-config-reloader/0.log" Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.135211 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f68a498f-2ca9-4462-a6dd-e77c69312c95/prometheus/0.log" Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.207808 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_f68a498f-2ca9-4462-a6dd-e77c69312c95/config-reloader/0.log" Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.231592 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f68a498f-2ca9-4462-a6dd-e77c69312c95/init-config-reloader/0.log" Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.432189 4611 generic.go:334] "Generic (PLEG): container finished" podID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerID="eede9e0669520239e685cf0c04a328d3f51d54508aa5efbd99b186342d25e969" exitCode=0 Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.433611 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerDied","Data":"eede9e0669520239e685cf0c04a328d3f51d54508aa5efbd99b186342d25e969"} Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.586940 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f68a498f-2ca9-4462-a6dd-e77c69312c95/thanos-sidecar/0.log" Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.587495 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8646b629-62bf-4405-b9ec-e2bcbceeb8bb/setup-container/0.log" Sep 29 14:34:05 crc kubenswrapper[4611]: I0929 14:34:05.920077 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8646b629-62bf-4405-b9ec-e2bcbceeb8bb/setup-container/0.log" Sep 29 14:34:06 crc kubenswrapper[4611]: I0929 14:34:06.118807 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8646b629-62bf-4405-b9ec-e2bcbceeb8bb/rabbitmq/0.log" Sep 29 14:34:06 crc kubenswrapper[4611]: I0929 14:34:06.291431 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_3b39f6c6-fcef-4959-b3ca-2e18f587762e/setup-container/0.log" Sep 29 14:34:06 crc kubenswrapper[4611]: I0929 14:34:06.509786 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_3b39f6c6-fcef-4959-b3ca-2e18f587762e/setup-container/0.log" Sep 29 14:34:06 crc kubenswrapper[4611]: I0929 14:34:06.584703 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_3b39f6c6-fcef-4959-b3ca-2e18f587762e/rabbitmq/0.log" Sep 29 14:34:06 crc kubenswrapper[4611]: I0929 14:34:06.948792 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-kbj2z_366b3f96-580c-4545-b8b5-4e776b70e6c0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:07 crc kubenswrapper[4611]: I0929 14:34:07.099142 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-dvcs4_4e6fc46f-36a2-4d36-a82e-877539513437/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:07 crc kubenswrapper[4611]: I0929 14:34:07.315008 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-2878w_642768bf-2945-467e-bed5-c02808905701/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:07 crc kubenswrapper[4611]: I0929 14:34:07.467508 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" 
event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerStarted","Data":"45e924151df9fdcd633bd322a830b2f5cbfe995e809dd0660c7c3ca139fbcbee"} Sep 29 14:34:07 crc kubenswrapper[4611]: I0929 14:34:07.601733 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-2kljd_fa84a9af-7a10-4a0b-8391-cc5db50e5275/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:07 crc kubenswrapper[4611]: I0929 14:34:07.925745 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-d57fs_0edd42a5-c566-4c3d-a2d9-6d9568c5396d/ssh-known-hosts-edpm-deployment/0.log" Sep 29 14:34:08 crc kubenswrapper[4611]: I0929 14:34:08.334557 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-54fd444d4f-vmksq_cf23ea05-4538-4fed-bb3d-07d009f400bd/proxy-server/0.log" Sep 29 14:34:08 crc kubenswrapper[4611]: I0929 14:34:08.388272 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-54fd444d4f-vmksq_cf23ea05-4538-4fed-bb3d-07d009f400bd/proxy-httpd/0.log" Sep 29 14:34:08 crc kubenswrapper[4611]: I0929 14:34:08.614432 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-z9w9b_b548cb20-950c-4d83-b7e1-c910375a4bf0/swift-ring-rebalance/0.log" Sep 29 14:34:08 crc kubenswrapper[4611]: I0929 14:34:08.694658 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/account-auditor/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.088586 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/account-reaper/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.159188 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/account-server/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.197230 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/account-replicator/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.353428 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/container-auditor/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.494426 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/container-server/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.535157 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/container-replicator/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.732914 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/container-updater/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.868155 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/object-expirer/0.log" Sep 29 14:34:09 crc kubenswrapper[4611]: I0929 14:34:09.994082 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/object-auditor/0.log" Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.117711 
4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/object-replicator/0.log" Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.234271 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/object-server/0.log" Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.265610 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/object-updater/0.log" Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.451015 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/rsync/0.log" Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.510925 4611 generic.go:334] "Generic (PLEG): container finished" podID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerID="45e924151df9fdcd633bd322a830b2f5cbfe995e809dd0660c7c3ca139fbcbee" exitCode=0 Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.510989 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerDied","Data":"45e924151df9fdcd633bd322a830b2f5cbfe995e809dd0660c7c3ca139fbcbee"} Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.523835 4611 generic.go:334] "Generic (PLEG): container finished" podID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerID="18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc" exitCode=0 Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.524037 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerDied","Data":"18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc"} Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.543402 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_458f3780-8709-4a3c-ac9e-9a1b5ced2172/swift-recon-cron/0.log" Sep 29 14:34:10 crc kubenswrapper[4611]: I0929 14:34:10.868270 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hxnrc_0cacd54c-23e1-40c3-963a-33bd7c91a0ad/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.058823 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2d140fa2-fe3d-4e16-810f-c9b568c4554c/tempest-tests-tempest-tests-runner/0.log" Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.456348 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9059c1f4-16be-468b-82ac-58311a3d727d/test-operator-logs-container/0.log" Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.535643 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerStarted","Data":"4b48f6f26158060ffe02c3297963a5dd1774f4b4fa61d23fcffc498fe759d0b4"} Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.539854 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" 
event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerStarted","Data":"905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086"} Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.563724 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4bfcb" podStartSLOduration=4.057044305 podStartE2EDuration="9.560781493s" podCreationTimestamp="2025-09-29 14:34:02 +0000 UTC" firstStartedPulling="2025-09-29 14:34:05.43617479 +0000 UTC m=+6832.327694396" lastFinishedPulling="2025-09-29 14:34:10.939911978 +0000 UTC m=+6837.831431584" observedRunningTime="2025-09-29 14:34:11.55515007 +0000 UTC m=+6838.446669676" watchObservedRunningTime="2025-09-29 14:34:11.560781493 +0000 UTC m=+6838.452301099" Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.583741 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vjtvg" podStartSLOduration=2.797172788 podStartE2EDuration="11.583719864s" podCreationTimestamp="2025-09-29 14:34:00 +0000 UTC" firstStartedPulling="2025-09-29 14:34:02.313649822 +0000 UTC m=+6829.205169428" lastFinishedPulling="2025-09-29 14:34:11.100196898 +0000 UTC m=+6837.991716504" observedRunningTime="2025-09-29 14:34:11.579958976 +0000 UTC m=+6838.471478582" watchObservedRunningTime="2025-09-29 14:34:11.583719864 +0000 UTC m=+6838.475239460" Sep 29 14:34:11 crc kubenswrapper[4611]: I0929 14:34:11.873753 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-gtjln_51d1eee6-844a-4026-a4c8-8bc1ec752b77/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 14:34:12 crc kubenswrapper[4611]: I0929 14:34:12.705417 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_1d3c3ace-8166-40ee-aee1-76bca836555e/watcher-api/0.log" Sep 29 14:34:13 crc kubenswrapper[4611]: I0929 14:34:13.376290 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:13 crc kubenswrapper[4611]: I0929 14:34:13.376663 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:13 crc kubenswrapper[4611]: I0929 14:34:13.423224 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_6c448e57-326a-4c7d-9e44-be5cf8afd7ea/watcher-applier/0.log" Sep 29 14:34:13 crc kubenswrapper[4611]: I0929 14:34:13.719341 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_1d3c3ace-8166-40ee-aee1-76bca836555e/watcher-api-log/0.log" Sep 29 14:34:13 crc kubenswrapper[4611]: I0929 14:34:13.750037 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:34:14 crc kubenswrapper[4611]: I0929 14:34:14.429109 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-4bfcb" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="registry-server" probeResult="failure" output=< Sep 29 14:34:14 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:34:14 crc kubenswrapper[4611]: > Sep 29 14:34:14 crc kubenswrapper[4611]: I0929 14:34:14.627829 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" 
event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"6e19b9055e3927d864ff4a7206886b48ae17d1ca0003008946b90b7c04db8e9b"} Sep 29 14:34:15 crc kubenswrapper[4611]: I0929 14:34:15.372376 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_f59308f5-6c15-48a5-b730-1543849afa05/watcher-decision-engine/0.log" Sep 29 14:34:16 crc kubenswrapper[4611]: I0929 14:34:16.446294 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_53d14921-aa05-4a37-acec-35bb89b384fb/memcached/0.log" Sep 29 14:34:21 crc kubenswrapper[4611]: I0929 14:34:21.060412 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:21 crc kubenswrapper[4611]: I0929 14:34:21.062123 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:22 crc kubenswrapper[4611]: I0929 14:34:22.121079 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vjtvg" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server" probeResult="failure" output=< Sep 29 14:34:22 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:34:22 crc kubenswrapper[4611]: > Sep 29 14:34:24 crc kubenswrapper[4611]: I0929 14:34:24.430657 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-4bfcb" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="registry-server" probeResult="failure" output=< Sep 29 14:34:24 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:34:24 crc kubenswrapper[4611]: > Sep 29 14:34:32 crc kubenswrapper[4611]: I0929 14:34:32.110890 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vjtvg" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server" probeResult="failure" output=< Sep 29 14:34:32 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:34:32 crc kubenswrapper[4611]: > Sep 29 14:34:33 crc kubenswrapper[4611]: I0929 14:34:33.442364 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:33 crc kubenswrapper[4611]: I0929 14:34:33.524181 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:34 crc kubenswrapper[4611]: I0929 14:34:34.028117 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4bfcb"] Sep 29 14:34:34 crc kubenswrapper[4611]: I0929 14:34:34.850288 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4bfcb" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="registry-server" containerID="cri-o://4b48f6f26158060ffe02c3297963a5dd1774f4b4fa61d23fcffc498fe759d0b4" gracePeriod=2 Sep 29 14:34:35 crc kubenswrapper[4611]: I0929 14:34:35.887025 4611 generic.go:334] "Generic (PLEG): container finished" podID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerID="4b48f6f26158060ffe02c3297963a5dd1774f4b4fa61d23fcffc498fe759d0b4" exitCode=0 Sep 29 14:34:35 crc kubenswrapper[4611]: I0929 14:34:35.887140 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerDied","Data":"4b48f6f26158060ffe02c3297963a5dd1774f4b4fa61d23fcffc498fe759d0b4"} Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.078005 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.145389 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn4m2\" (UniqueName: \"kubernetes.io/projected/d630aa78-d4bc-42e0-81ae-312da9f6511c-kube-api-access-rn4m2\") pod \"d630aa78-d4bc-42e0-81ae-312da9f6511c\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.145488 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-catalog-content\") pod \"d630aa78-d4bc-42e0-81ae-312da9f6511c\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.145513 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-utilities\") pod \"d630aa78-d4bc-42e0-81ae-312da9f6511c\" (UID: \"d630aa78-d4bc-42e0-81ae-312da9f6511c\") " Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.161133 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d630aa78-d4bc-42e0-81ae-312da9f6511c-kube-api-access-rn4m2" (OuterVolumeSpecName: "kube-api-access-rn4m2") pod "d630aa78-d4bc-42e0-81ae-312da9f6511c" (UID: "d630aa78-d4bc-42e0-81ae-312da9f6511c"). InnerVolumeSpecName "kube-api-access-rn4m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.149978 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-utilities" (OuterVolumeSpecName: "utilities") pod "d630aa78-d4bc-42e0-81ae-312da9f6511c" (UID: "d630aa78-d4bc-42e0-81ae-312da9f6511c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.262150 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn4m2\" (UniqueName: \"kubernetes.io/projected/d630aa78-d4bc-42e0-81ae-312da9f6511c-kube-api-access-rn4m2\") on node \"crc\" DevicePath \"\"" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.262517 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.273668 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d630aa78-d4bc-42e0-81ae-312da9f6511c" (UID: "d630aa78-d4bc-42e0-81ae-312da9f6511c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.364954 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d630aa78-d4bc-42e0-81ae-312da9f6511c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.899412 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bfcb" event={"ID":"d630aa78-d4bc-42e0-81ae-312da9f6511c","Type":"ContainerDied","Data":"c0c717aeb520585e4dfed4fded4aa682ca203598ac3851731d4db970dc09bef8"} Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.899464 4611 scope.go:117] "RemoveContainer" containerID="4b48f6f26158060ffe02c3297963a5dd1774f4b4fa61d23fcffc498fe759d0b4" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.899543 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4bfcb" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.941094 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4bfcb"] Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.944343 4611 scope.go:117] "RemoveContainer" containerID="45e924151df9fdcd633bd322a830b2f5cbfe995e809dd0660c7c3ca139fbcbee" Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.954688 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4bfcb"] Sep 29 14:34:36 crc kubenswrapper[4611]: I0929 14:34:36.981054 4611 scope.go:117] "RemoveContainer" containerID="eede9e0669520239e685cf0c04a328d3f51d54508aa5efbd99b186342d25e969" Sep 29 14:34:37 crc kubenswrapper[4611]: I0929 14:34:37.748059 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" path="/var/lib/kubelet/pods/d630aa78-d4bc-42e0-81ae-312da9f6511c/volumes" Sep 29 14:34:42 crc kubenswrapper[4611]: I0929 14:34:42.130520 4611 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vjtvg" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server" probeResult="failure" output=< Sep 29 14:34:42 crc kubenswrapper[4611]: timeout: failed to connect service ":50051" within 1s Sep 29 14:34:42 crc kubenswrapper[4611]: > Sep 29 14:34:51 crc kubenswrapper[4611]: I0929 14:34:51.116790 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:51 crc kubenswrapper[4611]: I0929 14:34:51.193951 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:51 crc kubenswrapper[4611]: I0929 14:34:51.383903 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vjtvg"] Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.040219 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vjtvg" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server" containerID="cri-o://905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086" gracePeriod=2 Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.664478 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.710763 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-catalog-content\") pod \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.710890 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j8hr\" (UniqueName: \"kubernetes.io/projected/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-kube-api-access-2j8hr\") pod \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.711026 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-utilities\") pod \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\" (UID: \"3a53a511-bad2-4b87-9f98-c8c3e0e2e930\") " Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.712063 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-utilities" (OuterVolumeSpecName: "utilities") pod "3a53a511-bad2-4b87-9f98-c8c3e0e2e930" (UID: "3a53a511-bad2-4b87-9f98-c8c3e0e2e930"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.723052 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-kube-api-access-2j8hr" (OuterVolumeSpecName: "kube-api-access-2j8hr") pod "3a53a511-bad2-4b87-9f98-c8c3e0e2e930" (UID: "3a53a511-bad2-4b87-9f98-c8c3e0e2e930"). InnerVolumeSpecName "kube-api-access-2j8hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.812923 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.812952 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j8hr\" (UniqueName: \"kubernetes.io/projected/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-kube-api-access-2j8hr\") on node \"crc\" DevicePath \"\"" Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.821273 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a53a511-bad2-4b87-9f98-c8c3e0e2e930" (UID: "3a53a511-bad2-4b87-9f98-c8c3e0e2e930"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:34:53 crc kubenswrapper[4611]: I0929 14:34:53.914671 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a53a511-bad2-4b87-9f98-c8c3e0e2e930-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.064834 4611 generic.go:334] "Generic (PLEG): container finished" podID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerID="905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086" exitCode=0 Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.064906 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerDied","Data":"905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086"} Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.064939 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjtvg" event={"ID":"3a53a511-bad2-4b87-9f98-c8c3e0e2e930","Type":"ContainerDied","Data":"7652020c43b12ca6a171922677adb9e8495974935b3b7d2e3eb0f1cb5a36f287"} Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.064967 4611 scope.go:117] "RemoveContainer" containerID="905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.065229 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vjtvg" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.099547 4611 scope.go:117] "RemoveContainer" containerID="18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.121790 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vjtvg"] Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.131664 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vjtvg"] Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.139026 4611 scope.go:117] "RemoveContainer" containerID="f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.198251 4611 scope.go:117] "RemoveContainer" containerID="905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086" Sep 29 14:34:54 crc kubenswrapper[4611]: E0929 14:34:54.214001 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086\": container with ID starting with 905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086 not found: ID does not exist" containerID="905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.214048 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086"} err="failed to get container status \"905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086\": rpc error: code = NotFound desc = could not find container \"905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086\": container with ID starting with 905c362a8942a66acbfbc043636ffcc8b4db05a1a7bdb91f9c58d4af4eebc086 not found: ID does not exist" Sep 29 14:34:54 crc 
kubenswrapper[4611]: I0929 14:34:54.214078 4611 scope.go:117] "RemoveContainer" containerID="18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc" Sep 29 14:34:54 crc kubenswrapper[4611]: E0929 14:34:54.215426 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc\": container with ID starting with 18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc not found: ID does not exist" containerID="18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.215469 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc"} err="failed to get container status \"18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc\": rpc error: code = NotFound desc = could not find container \"18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc\": container with ID starting with 18877ed1eb7f6cf1e89c180bc5f1557c52a135f2a4328d2bdab4cc459149dbcc not found: ID does not exist" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.215496 4611 scope.go:117] "RemoveContainer" containerID="f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf" Sep 29 14:34:54 crc kubenswrapper[4611]: E0929 14:34:54.215761 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf\": container with ID starting with f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf not found: ID does not exist" containerID="f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf" Sep 29 14:34:54 crc kubenswrapper[4611]: I0929 14:34:54.215785 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf"} err="failed to get container status \"f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf\": rpc error: code = NotFound desc = could not find container \"f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf\": container with ID starting with f2b166f87e6840e666200c98d60053d8f636d092cb24a420d80b35435c554adf not found: ID does not exist" Sep 29 14:34:55 crc kubenswrapper[4611]: I0929 14:34:55.753974 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" path="/var/lib/kubelet/pods/3a53a511-bad2-4b87-9f98-c8c3e0e2e930/volumes" Sep 29 14:35:07 crc kubenswrapper[4611]: I0929 14:35:07.179912 4611 generic.go:334] "Generic (PLEG): container finished" podID="3a4556b8-5cf4-46d1-816c-15095ce695a4" containerID="1b7ba23579c77faa8929fb0686fddce9763028cf27c82582676f3683067e4306" exitCode=0 Sep 29 14:35:07 crc kubenswrapper[4611]: I0929 14:35:07.179999 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv" event={"ID":"3a4556b8-5cf4-46d1-816c-15095ce695a4","Type":"ContainerDied","Data":"1b7ba23579c77faa8929fb0686fddce9763028cf27c82582676f3683067e4306"} Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.308713 4611 util.go:48] "No ready sandbox for pod can be found. 
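The E0929 "ContainerStatus from runtime service failed" entries above appear to be the benign race after pod deletion: the kubelet asks the runtime for the status of containers it has just removed, gets gRPC NotFound back from CRI-O, logs "DeleteContainer returned error", and carries on. A sketch of how a caller can separate "already gone" from a real failure, using the standard gRPC status API (hypothetical helper, not kubelet code):

```go
// Classify the CRI NotFound errors seen in the entries above.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyRemoved reports whether err is the gRPC NotFound a runtime
// returns for a container that was deleted in the meantime.
func alreadyRemoved(err error) bool {
	s, ok := status.FromError(err)
	return ok && s.Code() == codes.NotFound
}

func main() {
	// Shape of the error in the log (container ID elided here).
	err := status.Error(codes.NotFound, `could not find container "905c36..."`)
	if alreadyRemoved(err) {
		fmt.Println("container already gone; nothing to do")
	} else if err != nil {
		fmt.Println("real failure:", err)
	}
}
```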
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.342500 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-dhkjv"]
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.350349 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-dhkjv"]
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.406103 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brnkh\" (UniqueName: \"kubernetes.io/projected/3a4556b8-5cf4-46d1-816c-15095ce695a4-kube-api-access-brnkh\") pod \"3a4556b8-5cf4-46d1-816c-15095ce695a4\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") "
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.406414 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a4556b8-5cf4-46d1-816c-15095ce695a4-host\") pod \"3a4556b8-5cf4-46d1-816c-15095ce695a4\" (UID: \"3a4556b8-5cf4-46d1-816c-15095ce695a4\") "
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.406526 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3a4556b8-5cf4-46d1-816c-15095ce695a4-host" (OuterVolumeSpecName: "host") pod "3a4556b8-5cf4-46d1-816c-15095ce695a4" (UID: "3a4556b8-5cf4-46d1-816c-15095ce695a4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.407149 4611 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a4556b8-5cf4-46d1-816c-15095ce695a4-host\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.413189 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a4556b8-5cf4-46d1-816c-15095ce695a4-kube-api-access-brnkh" (OuterVolumeSpecName: "kube-api-access-brnkh") pod "3a4556b8-5cf4-46d1-816c-15095ce695a4" (UID: "3a4556b8-5cf4-46d1-816c-15095ce695a4"). InnerVolumeSpecName "kube-api-access-brnkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:35:08 crc kubenswrapper[4611]: I0929 14:35:08.509226 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brnkh\" (UniqueName: \"kubernetes.io/projected/3a4556b8-5cf4-46d1-816c-15095ce695a4-kube-api-access-brnkh\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.210423 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="260f85ab193b29917b9f1468d559988446a878825754a8d6ae5018b884e35eca"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.210539 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-dhkjv"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.559913 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-n6442"]
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563499 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="extract-content"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563601 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="extract-content"
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563656 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="extract-utilities"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563667 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="extract-utilities"
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563699 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="extract-content"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563709 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="extract-content"
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563726 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a4556b8-5cf4-46d1-816c-15095ce695a4" containerName="container-00"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563734 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a4556b8-5cf4-46d1-816c-15095ce695a4" containerName="container-00"
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563750 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="extract-utilities"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563757 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="extract-utilities"
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563779 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="registry-server"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563786 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="registry-server"
Sep 29 14:35:09 crc kubenswrapper[4611]: E0929 14:35:09.563805 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.563813 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.564833 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="d630aa78-d4bc-42e0-81ae-312da9f6511c" containerName="registry-server"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.564875 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a4556b8-5cf4-46d1-816c-15095ce695a4" containerName="container-00"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.564891 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a53a511-bad2-4b87-9f98-c8c3e0e2e930" containerName="registry-server"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.568105 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.730960 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b7a17e98-03ee-41ae-a318-ce92d080937c-host\") pod \"crc-debug-n6442\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") " pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.731019 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpr4m\" (UniqueName: \"kubernetes.io/projected/b7a17e98-03ee-41ae-a318-ce92d080937c-kube-api-access-zpr4m\") pod \"crc-debug-n6442\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") " pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.749263 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a4556b8-5cf4-46d1-816c-15095ce695a4" path="/var/lib/kubelet/pods/3a4556b8-5cf4-46d1-816c-15095ce695a4/volumes"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.833750 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b7a17e98-03ee-41ae-a318-ce92d080937c-host\") pod \"crc-debug-n6442\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") " pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.833833 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpr4m\" (UniqueName: \"kubernetes.io/projected/b7a17e98-03ee-41ae-a318-ce92d080937c-kube-api-access-zpr4m\") pod \"crc-debug-n6442\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") " pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.834493 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b7a17e98-03ee-41ae-a318-ce92d080937c-host\") pod \"crc-debug-n6442\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") " pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.855909 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpr4m\" (UniqueName: \"kubernetes.io/projected/b7a17e98-03ee-41ae-a318-ce92d080937c-kube-api-access-zpr4m\") pod \"crc-debug-n6442\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") " pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:09 crc kubenswrapper[4611]: I0929 14:35:09.887248 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:10 crc kubenswrapper[4611]: I0929 14:35:10.221537 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-n6442" event={"ID":"b7a17e98-03ee-41ae-a318-ce92d080937c","Type":"ContainerStarted","Data":"d63ec77129a7ff30532d40e158975a7eb6389c5874ce24136450dbb40e5f97c5"}
Sep 29 14:35:10 crc kubenswrapper[4611]: I0929 14:35:10.221914 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-n6442" event={"ID":"b7a17e98-03ee-41ae-a318-ce92d080937c","Type":"ContainerStarted","Data":"2f6e99140901ecb91579ced79c3b6e4b25b139d18d1ed015a950b25c22c6d697"}
Sep 29 14:35:10 crc kubenswrapper[4611]: I0929 14:35:10.241315 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mkzpb/crc-debug-n6442" podStartSLOduration=1.240005632 podStartE2EDuration="1.240005632s" podCreationTimestamp="2025-09-29 14:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:35:10.235565894 +0000 UTC m=+6897.127085510" watchObservedRunningTime="2025-09-29 14:35:10.240005632 +0000 UTC m=+6897.131525228"
Sep 29 14:35:11 crc kubenswrapper[4611]: I0929 14:35:11.236143 4611 generic.go:334] "Generic (PLEG): container finished" podID="b7a17e98-03ee-41ae-a318-ce92d080937c" containerID="d63ec77129a7ff30532d40e158975a7eb6389c5874ce24136450dbb40e5f97c5" exitCode=0
Sep 29 14:35:11 crc kubenswrapper[4611]: I0929 14:35:11.236238 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-n6442" event={"ID":"b7a17e98-03ee-41ae-a318-ce92d080937c","Type":"ContainerDied","Data":"d63ec77129a7ff30532d40e158975a7eb6389c5874ce24136450dbb40e5f97c5"}
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.375378 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.474259 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpr4m\" (UniqueName: \"kubernetes.io/projected/b7a17e98-03ee-41ae-a318-ce92d080937c-kube-api-access-zpr4m\") pod \"b7a17e98-03ee-41ae-a318-ce92d080937c\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") "
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.474409 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b7a17e98-03ee-41ae-a318-ce92d080937c-host\") pod \"b7a17e98-03ee-41ae-a318-ce92d080937c\" (UID: \"b7a17e98-03ee-41ae-a318-ce92d080937c\") "
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.474785 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b7a17e98-03ee-41ae-a318-ce92d080937c-host" (OuterVolumeSpecName: "host") pod "b7a17e98-03ee-41ae-a318-ce92d080937c" (UID: "b7a17e98-03ee-41ae-a318-ce92d080937c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.503614 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7a17e98-03ee-41ae-a318-ce92d080937c-kube-api-access-zpr4m" (OuterVolumeSpecName: "kube-api-access-zpr4m") pod "b7a17e98-03ee-41ae-a318-ce92d080937c" (UID: "b7a17e98-03ee-41ae-a318-ce92d080937c"). InnerVolumeSpecName "kube-api-access-zpr4m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.577400 4611 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b7a17e98-03ee-41ae-a318-ce92d080937c-host\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:12 crc kubenswrapper[4611]: I0929 14:35:12.577463 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpr4m\" (UniqueName: \"kubernetes.io/projected/b7a17e98-03ee-41ae-a318-ce92d080937c-kube-api-access-zpr4m\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:13 crc kubenswrapper[4611]: I0929 14:35:13.255713 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-n6442" event={"ID":"b7a17e98-03ee-41ae-a318-ce92d080937c","Type":"ContainerDied","Data":"2f6e99140901ecb91579ced79c3b6e4b25b139d18d1ed015a950b25c22c6d697"}
Sep 29 14:35:13 crc kubenswrapper[4611]: I0929 14:35:13.255754 4611 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f6e99140901ecb91579ced79c3b6e4b25b139d18d1ed015a950b25c22c6d697"
Sep 29 14:35:13 crc kubenswrapper[4611]: I0929 14:35:13.255766 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-n6442"
Sep 29 14:35:19 crc kubenswrapper[4611]: I0929 14:35:19.390670 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-n6442"]
Sep 29 14:35:19 crc kubenswrapper[4611]: I0929 14:35:19.399945 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-n6442"]
Sep 29 14:35:19 crc kubenswrapper[4611]: I0929 14:35:19.754387 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7a17e98-03ee-41ae-a318-ce92d080937c" path="/var/lib/kubelet/pods/b7a17e98-03ee-41ae-a318-ce92d080937c/volumes"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.609860 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-pxnp7"]
Sep 29 14:35:20 crc kubenswrapper[4611]: E0929 14:35:20.610254 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7a17e98-03ee-41ae-a318-ce92d080937c" containerName="container-00"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.610267 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7a17e98-03ee-41ae-a318-ce92d080937c" containerName="container-00"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.610470 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7a17e98-03ee-41ae-a318-ce92d080937c" containerName="container-00"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.611096 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.708974 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5a78741-590d-4716-bfbc-3d4a30c4fcda-host\") pod \"crc-debug-pxnp7\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") " pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.709212 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmk8l\" (UniqueName: \"kubernetes.io/projected/f5a78741-590d-4716-bfbc-3d4a30c4fcda-kube-api-access-hmk8l\") pod \"crc-debug-pxnp7\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") " pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.810896 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5a78741-590d-4716-bfbc-3d4a30c4fcda-host\") pod \"crc-debug-pxnp7\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") " pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.810950 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmk8l\" (UniqueName: \"kubernetes.io/projected/f5a78741-590d-4716-bfbc-3d4a30c4fcda-kube-api-access-hmk8l\") pod \"crc-debug-pxnp7\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") " pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.811671 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5a78741-590d-4716-bfbc-3d4a30c4fcda-host\") pod \"crc-debug-pxnp7\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") " pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.834466 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmk8l\" (UniqueName: \"kubernetes.io/projected/f5a78741-590d-4716-bfbc-3d4a30c4fcda-kube-api-access-hmk8l\") pod \"crc-debug-pxnp7\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") " pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:20 crc kubenswrapper[4611]: I0929 14:35:20.930761 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:21 crc kubenswrapper[4611]: I0929 14:35:21.331737 4611 generic.go:334] "Generic (PLEG): container finished" podID="f5a78741-590d-4716-bfbc-3d4a30c4fcda" containerID="859fc29032c288976cf7e5f4784114d026e2c25650527847e212bd5f171e99a2" exitCode=0
Sep 29 14:35:21 crc kubenswrapper[4611]: I0929 14:35:21.331835 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-pxnp7" event={"ID":"f5a78741-590d-4716-bfbc-3d4a30c4fcda","Type":"ContainerDied","Data":"859fc29032c288976cf7e5f4784114d026e2c25650527847e212bd5f171e99a2"}
Sep 29 14:35:21 crc kubenswrapper[4611]: I0929 14:35:21.332098 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/crc-debug-pxnp7" event={"ID":"f5a78741-590d-4716-bfbc-3d4a30c4fcda","Type":"ContainerStarted","Data":"3ce574756e3eaf428684e6133c0198d830ce84377b58be75f43911f66fd63792"}
Sep 29 14:35:21 crc kubenswrapper[4611]: I0929 14:35:21.365119 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-pxnp7"]
Sep 29 14:35:21 crc kubenswrapper[4611]: I0929 14:35:21.374699 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mkzpb/crc-debug-pxnp7"]
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.450732 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.539820 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmk8l\" (UniqueName: \"kubernetes.io/projected/f5a78741-590d-4716-bfbc-3d4a30c4fcda-kube-api-access-hmk8l\") pod \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") "
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.540154 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5a78741-590d-4716-bfbc-3d4a30c4fcda-host\") pod \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\" (UID: \"f5a78741-590d-4716-bfbc-3d4a30c4fcda\") "
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.540397 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5a78741-590d-4716-bfbc-3d4a30c4fcda-host" (OuterVolumeSpecName: "host") pod "f5a78741-590d-4716-bfbc-3d4a30c4fcda" (UID: "f5a78741-590d-4716-bfbc-3d4a30c4fcda"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.542152 4611 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5a78741-590d-4716-bfbc-3d4a30c4fcda-host\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.547867 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5a78741-590d-4716-bfbc-3d4a30c4fcda-kube-api-access-hmk8l" (OuterVolumeSpecName: "kube-api-access-hmk8l") pod "f5a78741-590d-4716-bfbc-3d4a30c4fcda" (UID: "f5a78741-590d-4716-bfbc-3d4a30c4fcda"). InnerVolumeSpecName "kube-api-access-hmk8l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:35:22 crc kubenswrapper[4611]: I0929 14:35:22.643366 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmk8l\" (UniqueName: \"kubernetes.io/projected/f5a78741-590d-4716-bfbc-3d4a30c4fcda-kube-api-access-hmk8l\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.350171 4611 scope.go:117] "RemoveContainer" containerID="859fc29032c288976cf7e5f4784114d026e2c25650527847e212bd5f171e99a2"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.350699 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/crc-debug-pxnp7"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.443554 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/util/0.log"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.636893 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/pull/0.log"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.640084 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/util/0.log"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.693928 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/pull/0.log"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.745841 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5a78741-590d-4716-bfbc-3d4a30c4fcda" path="/var/lib/kubelet/pods/f5a78741-590d-4716-bfbc-3d4a30c4fcda/volumes"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.823853 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/util/0.log"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.857666 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/pull/0.log"
Sep 29 14:35:23 crc kubenswrapper[4611]: I0929 14:35:23.941129 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_11265dc60c026e63a8038b5bbbafc9f5554af30d365cecba952729ec36b25vk_f1038ae1-2330-481e-915d-bf84f8cdbd07/extract/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.014151 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/util/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.210439 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/util/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.246451 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/pull/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.267540 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/pull/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.504403 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/pull/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.538365 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/util/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.543414 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a0197fcee2eb7996e28f65d0feeef710a2bc8677a4b36f760dc2c7ce1g9hkh_83608d6a-9e03-49d5-8e0e-762b5dacbf7b/extract/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.750401 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7fdd8988b7-527vk_4f637c90-4822-4587-922d-3dbf2240977b/kube-rbac-proxy/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.808484 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7fdd8988b7-527vk_4f637c90-4822-4587-922d-3dbf2240977b/manager/0.log"
Sep 29 14:35:24 crc kubenswrapper[4611]: I0929 14:35:24.869819 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5d9d689896-m94tx_6e145eda-2d1e-414d-b09c-b78dc328af46/kube-rbac-proxy/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.033490 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5d9d689896-m94tx_6e145eda-2d1e-414d-b09c-b78dc328af46/manager/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.070669 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-54767c9785-ph2fv_09ccaa74-5871-4408-8476-54e35b95a774/kube-rbac-proxy/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.084494 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-54767c9785-ph2fv_09ccaa74-5871-4408-8476-54e35b95a774/manager/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.279653 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7c68997f6b-thmfm_5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b/kube-rbac-proxy/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.377898 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7c68997f6b-thmfm_5e685e71-ed8b-43b9-a6b9-3e1a1d9eec1b/manager/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.491994 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-7bb9679997-2fcdq_c01aecec-3545-4b0d-a81f-0440b1cc2c19/kube-rbac-proxy/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.565069 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-7bb9679997-2fcdq_c01aecec-3545-4b0d-a81f-0440b1cc2c19/manager/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.645532 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-769bb6b489-8mwgc_198f1c1f-a452-4e1a-be6a-7bcfbe372441/kube-rbac-proxy/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.754849 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-769bb6b489-8mwgc_198f1c1f-a452-4e1a-be6a-7bcfbe372441/manager/0.log"
Sep 29 14:35:25 crc kubenswrapper[4611]: I0929 14:35:25.858248 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-b758b5fbd-w8r7q_c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.024724 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-b758b5fbd-w8r7q_c09c3fe6-6fb4-457f-b7a9-fdaa52541ce6/manager/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.118557 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-68ccf47b7f-dr6tt_78aeae10-6ff4-4ec2-9a6e-617b5b774122/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.123705 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-68ccf47b7f-dr6tt_78aeae10-6ff4-4ec2-9a6e-617b5b774122/manager/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.239004 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-747665895-hdcxr_6cb9eef2-e10a-4a0c-bf29-8ade30f57048/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.439382 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-747665895-hdcxr_6cb9eef2-e10a-4a0c-bf29-8ade30f57048/manager/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.463304 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-8b756d9b7-t9stx_c162813b-a3c1-4d12-a3ec-5ecb784c56da/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.492821 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-8b756d9b7-t9stx_c162813b-a3c1-4d12-a3ec-5ecb784c56da/manager/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.677115 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5dfc69dd64-k6z9q_043571f9-41a3-4573-a1a5-f50f80be69e9/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.695858 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5dfc69dd64-k6z9q_043571f9-41a3-4573-a1a5-f50f80be69e9/manager/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.863362 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5869b4f857-md24f_20d2ac56-4812-4211-82c2-787ece927b52/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.944139 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-cf9757659-9xvq5_b63c6e5e-8aed-46b0-847a-d7a129e56281/kube-rbac-proxy/0.log"
Sep 29 14:35:26 crc kubenswrapper[4611]: I0929 14:35:26.970647 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5869b4f857-md24f_20d2ac56-4812-4211-82c2-787ece927b52/manager/0.log"
Sep 29 14:35:27 crc kubenswrapper[4611]: I0929 14:35:27.207892 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-cf9757659-9xvq5_b63c6e5e-8aed-46b0-847a-d7a129e56281/manager/0.log"
Sep 29 14:35:27 crc kubenswrapper[4611]: I0929 14:35:27.247391 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-577fccdf59-w6qg5_9774896f-6656-4059-84b3-1e40fe0b5a30/manager/0.log"
Sep 29 14:35:27 crc kubenswrapper[4611]: I0929 14:35:27.252431 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-577fccdf59-w6qg5_9774896f-6656-4059-84b3-1e40fe0b5a30/kube-rbac-proxy/0.log"
Sep 29 14:35:27 crc kubenswrapper[4611]: I0929 14:35:27.430821 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl_eaa8a3d9-b8aa-4524-9e85-3e56463484f8/kube-rbac-proxy/0.log"
Sep 29 14:35:27 crc kubenswrapper[4611]: I0929 14:35:27.468849 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-647c7ff67bwj4cl_eaa8a3d9-b8aa-4524-9e85-3e56463484f8/manager/0.log"
Sep 29 14:35:27 crc kubenswrapper[4611]: I0929 14:35:27.879806 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-754df57b6f-4hjqs_684aa388-0688-47d5-94fc-3dc35ee44c84/kube-rbac-proxy/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.053081 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6f887f99fd-2t2zd_2db5cdab-8171-4cb8-9bb1-065c1d194657/kube-rbac-proxy/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.330389 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nbhph_503e11a8-b465-4721-a817-fd82011936eb/registry-server/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.374544 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6f887f99fd-2t2zd_2db5cdab-8171-4cb8-9bb1-065c1d194657/operator/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.570730 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-645f75c974-l5dcb_37ff3b92-ad5d-43ef-a942-b4dcd472c9c5/kube-rbac-proxy/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.654400 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-645f75c974-l5dcb_37ff3b92-ad5d-43ef-a942-b4dcd472c9c5/manager/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.834638 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d8d5f5cf9-jvqfh_191225c7-d674-4c8d-9a15-7704f1dc80fb/kube-rbac-proxy/0.log"
Sep 29 14:35:28 crc kubenswrapper[4611]: I0929 14:35:28.950506 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d8d5f5cf9-jvqfh_191225c7-d674-4c8d-9a15-7704f1dc80fb/manager/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.037480 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-hkpgs_4ec018f9-0388-4dac-af1d-75d43cfc0f89/operator/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.055172 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-754df57b6f-4hjqs_684aa388-0688-47d5-94fc-3dc35ee44c84/manager/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.359030 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-857777455b-wqpzn_2348e002-3282-492c-a309-3e5b9eacfefd/kube-rbac-proxy/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.526387 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-857777455b-wqpzn_2348e002-3282-492c-a309-3e5b9eacfefd/manager/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.618184 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6fdf4565bc-8d47j_5feb7075-b56c-40a0-bab9-9205bcc973f0/kube-rbac-proxy/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.717534 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6fdf4565bc-8d47j_5feb7075-b56c-40a0-bab9-9205bcc973f0/manager/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.783891 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5f4f4847c9-tq7mf_4365233e-5b3b-4d90-8497-32deefcdc842/kube-rbac-proxy/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.843483 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5f4f4847c9-tq7mf_4365233e-5b3b-4d90-8497-32deefcdc842/manager/0.log"
Sep 29 14:35:29 crc kubenswrapper[4611]: I0929 14:35:29.948019 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-659c84d44d-kp6fw_95c1a320-ab1a-4f16-ae1b-1cb890574834/manager/0.log"
Sep 29 14:35:30 crc kubenswrapper[4611]: I0929 14:35:30.008541 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-659c84d44d-kp6fw_95c1a320-ab1a-4f16-ae1b-1cb890574834/kube-rbac-proxy/0.log"
Sep 29 14:35:30 crc kubenswrapper[4611]: I0929 14:35:30.106513 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-index-hw62p_267f8e93-87ba-483d-a4e3-a5c2cf82b772/registry-server/0.log"
Sep 29 14:35:46 crc kubenswrapper[4611]: I0929 14:35:46.520775 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wwvsv_7ed1460d-346f-4749-8a8d-107744d6b4a3/control-plane-machine-set-operator/0.log"
Sep 29 14:35:46 crc kubenswrapper[4611]: I0929 14:35:46.716916 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-kv8lp_14166aa7-7554-4165-9a14-f222a13d3c82/kube-rbac-proxy/0.log"
Sep 29 14:35:46 crc kubenswrapper[4611]: I0929 14:35:46.718100 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-kv8lp_14166aa7-7554-4165-9a14-f222a13d3c82/machine-api-operator/0.log"
Sep 29 14:35:58 crc kubenswrapper[4611]: I0929 14:35:58.989198 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-9pzh9_9c6eb535-520e-4d74-b699-f2aa3b5b5d8c/cert-manager-controller/0.log"
Sep 29 14:35:59 crc kubenswrapper[4611]: I0929 14:35:59.076126 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-2flkk_1c273b1f-5284-46b7-8167-05e3bcc66102/cert-manager-cainjector/0.log"
Sep 29 14:35:59 crc kubenswrapper[4611]: I0929 14:35:59.198515 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-krmvg_dbaafa13-738a-4c17-a4d0-b52614e7ae87/cert-manager-webhook/0.log"
Sep 29 14:36:11 crc kubenswrapper[4611]: I0929 14:36:11.723973 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-n4kjj_f71eaddf-7657-4ba9-a010-aaf7ef007f6e/nmstate-console-plugin/0.log"
Sep 29 14:36:11 crc kubenswrapper[4611]: I0929 14:36:11.862706 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xq52r_9eba9e14-2b7a-4874-a48b-26cbee1d9c6d/nmstate-handler/0.log"
Sep 29 14:36:11 crc kubenswrapper[4611]: I0929 14:36:11.940608 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-xh6gk_46ee623a-0fc0-4e35-a3de-96d6f2cbacb3/kube-rbac-proxy/0.log"
Sep 29 14:36:11 crc kubenswrapper[4611]: I0929 14:36:11.970812 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-xh6gk_46ee623a-0fc0-4e35-a3de-96d6f2cbacb3/nmstate-metrics/0.log"
Sep 29 14:36:12 crc kubenswrapper[4611]: I0929 14:36:12.111642 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-rmq66_aeb9e2d5-fee2-4175-bca2-0b41f8955b5e/nmstate-operator/0.log"
Sep 29 14:36:12 crc kubenswrapper[4611]: I0929 14:36:12.215020 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-xgvs7_715ae13d-ad60-4871-a0f3-9f3575718223/nmstate-webhook/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.097176 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-777x6_e99966f9-0316-4285-aab7-deb192348231/kube-rbac-proxy/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.312568 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-777x6_e99966f9-0316-4285-aab7-deb192348231/controller/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.430211 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-frr-files/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.545690 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-frr-files/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.553473 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-reloader/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.603481 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-metrics/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.669520 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-reloader/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.835979 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-reloader/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.845511 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-metrics/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.871202 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-frr-files/0.log"
Sep 29 14:36:26 crc kubenswrapper[4611]: I0929 14:36:26.893794 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-metrics/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.124178 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-reloader/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.148983 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-frr-files/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.168255 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/cp-metrics/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.200685 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/controller/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.409932 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/frr-metrics/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.442503 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/kube-rbac-proxy/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.491183 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/kube-rbac-proxy-frr/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.661191 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/reloader/0.log"
Sep 29 14:36:27 crc kubenswrapper[4611]: I0929 14:36:27.752313 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-8zvrg_1ca8494f-d4cf-483e-ac41-ebdaa4f585a2/frr-k8s-webhook-server/0.log"
Sep 29 14:36:28 crc kubenswrapper[4611]: I0929 14:36:28.007912 4611 log.go:25] "Finished parsing log file"
path="/var/log/pods/metallb-system_metallb-operator-controller-manager-857688c5cb-tvmnh_0a1613d5-6707-489a-b5a8-f8aa95ebc744/manager/0.log" Sep 29 14:36:28 crc kubenswrapper[4611]: I0929 14:36:28.218910 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5b6b57cd56-hx5bf_c9329be6-e276-49b8-b4ff-89b18b9c350b/webhook-server/0.log" Sep 29 14:36:28 crc kubenswrapper[4611]: I0929 14:36:28.343276 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gfxw6_f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5/kube-rbac-proxy/0.log" Sep 29 14:36:29 crc kubenswrapper[4611]: I0929 14:36:29.101328 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gfxw6_f57fc1c1-c6a7-40e5-8ac6-9cd8650b8af5/speaker/0.log" Sep 29 14:36:29 crc kubenswrapper[4611]: I0929 14:36:29.229518 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2bsvt_a0fdd196-cac0-4d07-93f0-d33fac04af37/frr/0.log" Sep 29 14:36:34 crc kubenswrapper[4611]: I0929 14:36:34.630660 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:36:34 crc kubenswrapper[4611]: I0929 14:36:34.633472 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:36:40 crc kubenswrapper[4611]: I0929 14:36:40.838638 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/util/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.084372 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/util/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.092101 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/pull/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.133846 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/pull/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.275374 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/extract/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.296558 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/pull/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.328907 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bclc89h_6c55cf0b-ccfb-4a3f-9a3e-c66277f7d807/util/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.464933 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/util/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.632156 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/util/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.666820 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/pull/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.667874 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/pull/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.891458 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/util/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.926477 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/pull/0.log" Sep 29 14:36:41 crc kubenswrapper[4611]: I0929 14:36:41.939615 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtfxh2_93a499b8-40a0-4d68-aa6e-df5aaf7f5e21/extract/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.110434 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/extract-utilities/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.286652 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/extract-content/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.293055 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/extract-utilities/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.338966 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/extract-content/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.561123 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/extract-content/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.561479 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/extract-utilities/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.836371 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/extract-utilities/0.log" Sep 29 14:36:42 crc kubenswrapper[4611]: I0929 14:36:42.900191 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-npx8p_983bac02-54cb-47f1-bc9d-4ff404002926/registry-server/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.002292 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/extract-utilities/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.053236 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/extract-content/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.093978 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/extract-content/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.233204 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/extract-content/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.359018 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/extract-utilities/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.603043 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/util/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.899586 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/pull/0.log" Sep 29 14:36:43 crc kubenswrapper[4611]: I0929 14:36:43.977259 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/pull/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.057841 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/util/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.229706 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/util/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.304625 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/pull/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.309144 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kfmlj_1dad5bc8-c0f2-437f-82f1-d516c6738eeb/registry-server/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.400348 4611 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96vz6b7_d499cc8a-da8a-4fe1-a8f1-8dcb11bd0ba6/extract/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.658742 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/extract-utilities/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.744212 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7s6xd_471e6196-70f6-4f58-b544-aec3c50ec4b7/marketplace-operator/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.914794 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/extract-utilities/0.log" Sep 29 14:36:44 crc kubenswrapper[4611]: I0929 14:36:44.993941 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/extract-content/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.003929 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/extract-content/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.163195 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/extract-utilities/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.262073 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/extract-utilities/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.295450 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/extract-content/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.386338 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l98cw_77747050-735f-46d8-a725-dfc31764b0e7/registry-server/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.499157 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/extract-utilities/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.501077 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/extract-content/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.563288 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/extract-content/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.712135 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/extract-utilities/0.log" Sep 29 14:36:45 crc kubenswrapper[4611]: I0929 14:36:45.721881 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/extract-content/0.log" Sep 29 14:36:46 crc kubenswrapper[4611]: I0929 14:36:46.577568 4611 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-c6d4c_3413e896-e275-4fab-81c4-c03efb31fcc6/registry-server/0.log" Sep 29 14:36:58 crc kubenswrapper[4611]: I0929 14:36:58.441558 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-wqk5b_00915c37-bca5-4549-952b-ed9a40de0aa7/prometheus-operator/0.log" Sep 29 14:36:58 crc kubenswrapper[4611]: I0929 14:36:58.592890 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd8c5df8d-75xgv_80990670-7269-474f-8fe7-dfdb646689b4/prometheus-operator-admission-webhook/0.log" Sep 29 14:36:58 crc kubenswrapper[4611]: I0929 14:36:58.660358 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd8c5df8d-7scqx_bedf6678-a16d-4714-8f65-b865c1bc9b16/prometheus-operator-admission-webhook/0.log" Sep 29 14:36:58 crc kubenswrapper[4611]: I0929 14:36:58.852271 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-9k89j_9d8f225d-549f-436f-98d9-9dc43b350002/operator/0.log" Sep 29 14:36:58 crc kubenswrapper[4611]: I0929 14:36:58.865170 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-4jn8b_a0f95584-7024-474f-a6d5-5d4a409e4db4/perses-operator/0.log" Sep 29 14:37:04 crc kubenswrapper[4611]: I0929 14:37:04.628739 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:37:04 crc kubenswrapper[4611]: I0929 14:37:04.629343 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:37:34 crc kubenswrapper[4611]: I0929 14:37:34.628170 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:37:34 crc kubenswrapper[4611]: I0929 14:37:34.628722 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:37:34 crc kubenswrapper[4611]: I0929 14:37:34.631286 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:37:34 crc kubenswrapper[4611]: I0929 14:37:34.634065 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6e19b9055e3927d864ff4a7206886b48ae17d1ca0003008946b90b7c04db8e9b"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, 
will be restarted" Sep 29 14:37:34 crc kubenswrapper[4611]: I0929 14:37:34.635295 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://6e19b9055e3927d864ff4a7206886b48ae17d1ca0003008946b90b7c04db8e9b" gracePeriod=600 Sep 29 14:37:35 crc kubenswrapper[4611]: I0929 14:37:35.610788 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="6e19b9055e3927d864ff4a7206886b48ae17d1ca0003008946b90b7c04db8e9b" exitCode=0 Sep 29 14:37:35 crc kubenswrapper[4611]: I0929 14:37:35.611331 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"6e19b9055e3927d864ff4a7206886b48ae17d1ca0003008946b90b7c04db8e9b"} Sep 29 14:37:35 crc kubenswrapper[4611]: I0929 14:37:35.612571 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerStarted","Data":"691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630"} Sep 29 14:37:35 crc kubenswrapper[4611]: I0929 14:37:35.615531 4611 scope.go:117] "RemoveContainer" containerID="55544537b23786c645dcbd7e168d99ed84f1c15c375788ee65de4326f33720a7" Sep 29 14:38:40 crc kubenswrapper[4611]: I0929 14:38:40.464326 4611 scope.go:117] "RemoveContainer" containerID="1b7ba23579c77faa8929fb0686fddce9763028cf27c82582676f3683067e4306" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.476299 4611 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kbgrf"] Sep 29 14:39:17 crc kubenswrapper[4611]: E0929 14:39:17.480395 4611 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a78741-590d-4716-bfbc-3d4a30c4fcda" containerName="container-00" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.480424 4611 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a78741-590d-4716-bfbc-3d4a30c4fcda" containerName="container-00" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.481161 4611 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a78741-590d-4716-bfbc-3d4a30c4fcda" containerName="container-00" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.487123 4611 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.566700 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbgrf"] Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.591983 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-utilities\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.592196 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-catalog-content\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.592508 4611 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m72zf\" (UniqueName: \"kubernetes.io/projected/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-kube-api-access-m72zf\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.694875 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m72zf\" (UniqueName: \"kubernetes.io/projected/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-kube-api-access-m72zf\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.694982 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-utilities\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.695025 4611 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-catalog-content\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.695513 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-utilities\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.695526 4611 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-catalog-content\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.726565 4611 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-m72zf\" (UniqueName: \"kubernetes.io/projected/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-kube-api-access-m72zf\") pod \"redhat-marketplace-kbgrf\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:17 crc kubenswrapper[4611]: I0929 14:39:17.812240 4611 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:19 crc kubenswrapper[4611]: I0929 14:39:19.000713 4611 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbgrf"] Sep 29 14:39:19 crc kubenswrapper[4611]: I0929 14:39:19.664745 4611 generic.go:334] "Generic (PLEG): container finished" podID="2c8120ef-8e72-4e9d-9851-9ee70b7d8523" containerID="5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa" exitCode=0 Sep 29 14:39:19 crc kubenswrapper[4611]: I0929 14:39:19.664840 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbgrf" event={"ID":"2c8120ef-8e72-4e9d-9851-9ee70b7d8523","Type":"ContainerDied","Data":"5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa"} Sep 29 14:39:19 crc kubenswrapper[4611]: I0929 14:39:19.665311 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbgrf" event={"ID":"2c8120ef-8e72-4e9d-9851-9ee70b7d8523","Type":"ContainerStarted","Data":"2cd03f3b7d07af4464426e53db058b0235f5b06a8474be32d281abaf3b6c2081"} Sep 29 14:39:19 crc kubenswrapper[4611]: I0929 14:39:19.671410 4611 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:39:21 crc kubenswrapper[4611]: I0929 14:39:21.684866 4611 generic.go:334] "Generic (PLEG): container finished" podID="2c8120ef-8e72-4e9d-9851-9ee70b7d8523" containerID="7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066" exitCode=0 Sep 29 14:39:21 crc kubenswrapper[4611]: I0929 14:39:21.684962 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbgrf" event={"ID":"2c8120ef-8e72-4e9d-9851-9ee70b7d8523","Type":"ContainerDied","Data":"7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066"} Sep 29 14:39:22 crc kubenswrapper[4611]: I0929 14:39:22.697070 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbgrf" event={"ID":"2c8120ef-8e72-4e9d-9851-9ee70b7d8523","Type":"ContainerStarted","Data":"bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68"} Sep 29 14:39:22 crc kubenswrapper[4611]: I0929 14:39:22.735915 4611 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kbgrf" podStartSLOduration=3.223666777 podStartE2EDuration="5.728504401s" podCreationTimestamp="2025-09-29 14:39:17 +0000 UTC" firstStartedPulling="2025-09-29 14:39:19.667895068 +0000 UTC m=+7146.559414674" lastFinishedPulling="2025-09-29 14:39:22.172732692 +0000 UTC m=+7149.064252298" observedRunningTime="2025-09-29 14:39:22.71942804 +0000 UTC m=+7149.610947666" watchObservedRunningTime="2025-09-29 14:39:22.728504401 +0000 UTC m=+7149.620024017" Sep 29 14:39:25 crc kubenswrapper[4611]: I0929 14:39:25.729481 4611 generic.go:334] "Generic (PLEG): container finished" podID="7407eea2-def7-4a82-a48d-6043733a9faa" containerID="24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a" exitCode=0 Sep 29 14:39:25 crc kubenswrapper[4611]: I0929 
14:39:25.729618 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" event={"ID":"7407eea2-def7-4a82-a48d-6043733a9faa","Type":"ContainerDied","Data":"24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a"} Sep 29 14:39:25 crc kubenswrapper[4611]: I0929 14:39:25.730465 4611 scope.go:117] "RemoveContainer" containerID="24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a" Sep 29 14:39:26 crc kubenswrapper[4611]: I0929 14:39:26.176997 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mkzpb_must-gather-jqlcx_7407eea2-def7-4a82-a48d-6043733a9faa/gather/0.log" Sep 29 14:39:27 crc kubenswrapper[4611]: I0929 14:39:27.812798 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:27 crc kubenswrapper[4611]: I0929 14:39:27.813221 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:27 crc kubenswrapper[4611]: I0929 14:39:27.889214 4611 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:28 crc kubenswrapper[4611]: I0929 14:39:28.837781 4611 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:28 crc kubenswrapper[4611]: I0929 14:39:28.894829 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbgrf"] Sep 29 14:39:30 crc kubenswrapper[4611]: I0929 14:39:30.803155 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kbgrf" podUID="2c8120ef-8e72-4e9d-9851-9ee70b7d8523" containerName="registry-server" containerID="cri-o://bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68" gracePeriod=2 Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.330416 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.478802 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-utilities\") pod \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.478901 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-catalog-content\") pod \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.478952 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m72zf\" (UniqueName: \"kubernetes.io/projected/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-kube-api-access-m72zf\") pod \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\" (UID: \"2c8120ef-8e72-4e9d-9851-9ee70b7d8523\") " Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.481524 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-utilities" (OuterVolumeSpecName: "utilities") pod "2c8120ef-8e72-4e9d-9851-9ee70b7d8523" (UID: "2c8120ef-8e72-4e9d-9851-9ee70b7d8523"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.495201 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c8120ef-8e72-4e9d-9851-9ee70b7d8523" (UID: "2c8120ef-8e72-4e9d-9851-9ee70b7d8523"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.496581 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-kube-api-access-m72zf" (OuterVolumeSpecName: "kube-api-access-m72zf") pod "2c8120ef-8e72-4e9d-9851-9ee70b7d8523" (UID: "2c8120ef-8e72-4e9d-9851-9ee70b7d8523"). InnerVolumeSpecName "kube-api-access-m72zf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.582914 4611 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.582944 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m72zf\" (UniqueName: \"kubernetes.io/projected/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-kube-api-access-m72zf\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.582955 4611 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c8120ef-8e72-4e9d-9851-9ee70b7d8523-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.817120 4611 generic.go:334] "Generic (PLEG): container finished" podID="2c8120ef-8e72-4e9d-9851-9ee70b7d8523" containerID="bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68" exitCode=0 Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.817314 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbgrf" event={"ID":"2c8120ef-8e72-4e9d-9851-9ee70b7d8523","Type":"ContainerDied","Data":"bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68"} Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.817395 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbgrf" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.817401 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbgrf" event={"ID":"2c8120ef-8e72-4e9d-9851-9ee70b7d8523","Type":"ContainerDied","Data":"2cd03f3b7d07af4464426e53db058b0235f5b06a8474be32d281abaf3b6c2081"} Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.817428 4611 scope.go:117] "RemoveContainer" containerID="bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.848713 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbgrf"] Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.864965 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbgrf"] Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.869358 4611 scope.go:117] "RemoveContainer" containerID="7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.910790 4611 scope.go:117] "RemoveContainer" containerID="5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.948082 4611 scope.go:117] "RemoveContainer" containerID="bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68" Sep 29 14:39:31 crc kubenswrapper[4611]: E0929 14:39:31.950293 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68\": container with ID starting with bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68 not found: ID does not exist" containerID="bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.950332 4611 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68"} err="failed to get container status \"bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68\": rpc error: code = NotFound desc = could not find container \"bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68\": container with ID starting with bc3acf9b610007167d8637ba2a44798dc6cbaab3caee06f92203764113f43f68 not found: ID does not exist" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.950395 4611 scope.go:117] "RemoveContainer" containerID="7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066" Sep 29 14:39:31 crc kubenswrapper[4611]: E0929 14:39:31.950865 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066\": container with ID starting with 7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066 not found: ID does not exist" containerID="7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.950910 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066"} err="failed to get container status \"7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066\": rpc error: code = NotFound desc = could not find container \"7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066\": container with ID starting with 7b7662aba170cb0301766a2c64c79fceeb9552253194ffe866eed04519a71066 not found: ID does not exist" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.950928 4611 scope.go:117] "RemoveContainer" containerID="5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa" Sep 29 14:39:31 crc kubenswrapper[4611]: E0929 14:39:31.951262 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa\": container with ID starting with 5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa not found: ID does not exist" containerID="5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa" Sep 29 14:39:31 crc kubenswrapper[4611]: I0929 14:39:31.951285 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa"} err="failed to get container status \"5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa\": rpc error: code = NotFound desc = could not find container \"5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa\": container with ID starting with 5825e07fb6bffe3cfe19170929444913b07fbc77be7a3aeffb8e065f1d84a8aa not found: ID does not exist" Sep 29 14:39:33 crc kubenswrapper[4611]: I0929 14:39:33.751060 4611 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c8120ef-8e72-4e9d-9851-9ee70b7d8523" path="/var/lib/kubelet/pods/2c8120ef-8e72-4e9d-9851-9ee70b7d8523/volumes" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.101817 4611 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mkzpb/must-gather-jqlcx"] Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.105170 4611 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" podUID="7407eea2-def7-4a82-a48d-6043733a9faa" containerName="copy" containerID="cri-o://b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7" gracePeriod=2 Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.112388 4611 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mkzpb/must-gather-jqlcx"] Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.623253 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mkzpb_must-gather-jqlcx_7407eea2-def7-4a82-a48d-6043733a9faa/copy/0.log" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.624107 4611 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.747669 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7407eea2-def7-4a82-a48d-6043733a9faa-must-gather-output\") pod \"7407eea2-def7-4a82-a48d-6043733a9faa\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.747974 4611 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7rh9\" (UniqueName: \"kubernetes.io/projected/7407eea2-def7-4a82-a48d-6043733a9faa-kube-api-access-z7rh9\") pod \"7407eea2-def7-4a82-a48d-6043733a9faa\" (UID: \"7407eea2-def7-4a82-a48d-6043733a9faa\") " Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.761957 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7407eea2-def7-4a82-a48d-6043733a9faa-kube-api-access-z7rh9" (OuterVolumeSpecName: "kube-api-access-z7rh9") pod "7407eea2-def7-4a82-a48d-6043733a9faa" (UID: "7407eea2-def7-4a82-a48d-6043733a9faa"). InnerVolumeSpecName "kube-api-access-z7rh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.851454 4611 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7rh9\" (UniqueName: \"kubernetes.io/projected/7407eea2-def7-4a82-a48d-6043733a9faa-kube-api-access-z7rh9\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.919225 4611 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mkzpb_must-gather-jqlcx_7407eea2-def7-4a82-a48d-6043733a9faa/copy/0.log" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.919976 4611 generic.go:334] "Generic (PLEG): container finished" podID="7407eea2-def7-4a82-a48d-6043733a9faa" containerID="b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7" exitCode=143 Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.920126 4611 scope.go:117] "RemoveContainer" containerID="b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.920127 4611 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mkzpb/must-gather-jqlcx" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.961948 4611 scope.go:117] "RemoveContainer" containerID="24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a" Sep 29 14:39:38 crc kubenswrapper[4611]: I0929 14:39:38.963675 4611 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7407eea2-def7-4a82-a48d-6043733a9faa-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "7407eea2-def7-4a82-a48d-6043733a9faa" (UID: "7407eea2-def7-4a82-a48d-6043733a9faa"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:39:39 crc kubenswrapper[4611]: I0929 14:39:39.045220 4611 scope.go:117] "RemoveContainer" containerID="b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7" Sep 29 14:39:39 crc kubenswrapper[4611]: E0929 14:39:39.045590 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7\": container with ID starting with b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7 not found: ID does not exist" containerID="b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7" Sep 29 14:39:39 crc kubenswrapper[4611]: I0929 14:39:39.045612 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7"} err="failed to get container status \"b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7\": rpc error: code = NotFound desc = could not find container \"b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7\": container with ID starting with b4218376f697c85548d49fc3a7e444bdc90214d6562a2e45e0aa40006edcbef7 not found: ID does not exist" Sep 29 14:39:39 crc kubenswrapper[4611]: I0929 14:39:39.045664 4611 scope.go:117] "RemoveContainer" containerID="24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a" Sep 29 14:39:39 crc kubenswrapper[4611]: E0929 14:39:39.045928 4611 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a\": container with ID starting with 24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a not found: ID does not exist" containerID="24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a" Sep 29 14:39:39 crc kubenswrapper[4611]: I0929 14:39:39.045942 4611 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a"} err="failed to get container status \"24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a\": rpc error: code = NotFound desc = could not find container \"24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a\": container with ID starting with 24f99b5b2788ffd57f34f07eeed375091aac694b8e2b15661046705ae566f37a not found: ID does not exist" Sep 29 14:39:39 crc kubenswrapper[4611]: I0929 14:39:39.055973 4611 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7407eea2-def7-4a82-a48d-6043733a9faa-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:39 crc kubenswrapper[4611]: I0929 14:39:39.748002 4611 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="7407eea2-def7-4a82-a48d-6043733a9faa" path="/var/lib/kubelet/pods/7407eea2-def7-4a82-a48d-6043733a9faa/volumes" Sep 29 14:40:04 crc kubenswrapper[4611]: I0929 14:40:04.629079 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:40:04 crc kubenswrapper[4611]: I0929 14:40:04.629619 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:40:34 crc kubenswrapper[4611]: I0929 14:40:34.628575 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:40:34 crc kubenswrapper[4611]: I0929 14:40:34.629144 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.628370 4611 patch_prober.go:28] interesting pod/machine-config-daemon-d2gnq container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.629110 4611 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.629162 4611 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.629902 4611 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630"} pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.629951 4611 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" containerName="machine-config-daemon" containerID="cri-o://691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630" gracePeriod=600 Sep 29 14:41:04 crc kubenswrapper[4611]: E0929 14:41:04.756742 4611 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.762005 4611 generic.go:334] "Generic (PLEG): container finished" podID="9fea0777-8bbe-4100-806a-2580c80c902c" containerID="691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630" exitCode=0 Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.762062 4611 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" event={"ID":"9fea0777-8bbe-4100-806a-2580c80c902c","Type":"ContainerDied","Data":"691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630"} Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.762100 4611 scope.go:117] "RemoveContainer" containerID="6e19b9055e3927d864ff4a7206886b48ae17d1ca0003008946b90b7c04db8e9b" Sep 29 14:41:04 crc kubenswrapper[4611]: I0929 14:41:04.762940 4611 scope.go:117] "RemoveContainer" containerID="691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630" Sep 29 14:41:04 crc kubenswrapper[4611]: E0929 14:41:04.763312 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" Sep 29 14:41:15 crc kubenswrapper[4611]: I0929 14:41:15.740354 4611 scope.go:117] "RemoveContainer" containerID="691985bb48be890d6d5249a91db8b4128a69b3e748592770152632e54073d630" Sep 29 14:41:15 crc kubenswrapper[4611]: E0929 14:41:15.741014 4611 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d2gnq_openshift-machine-config-operator(9fea0777-8bbe-4100-806a-2580c80c902c)\"" pod="openshift-machine-config-operator/machine-config-daemon-d2gnq" podUID="9fea0777-8bbe-4100-806a-2580c80c902c" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066515223024452 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066515224017370 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066476442016523 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066476443015474 5ustar corecore